I'm using jquery.csv.min.js to create an array based on a CSV. This part works fine. However, when I try to parse that 2D array into sessionStorage items, I'm getting the dreaded "Uncaught TypeError: Cannot read property '0' of undefined". Here's what I have so far:
var stationData;
var station = "";

// Gather data from CSVs
$.ajax({
    type: "GET",
    url: "document.csv",
    dataType: "text",
    success: function(response){
        stationData = $.csv.toArrays(response);
        console.log(stationData); // This works and confirms the array in the console
        console.log("Station Variables Success!");
    }
});
// Parse array into sessionStorage items
$(document).ready(function() {
    setTimeout(() => {
        station = getUrlParameter('stn');
        var v;
        var varLen = stationData[0].length;
        for (v = 0; v < varLen; v++) {
            sessionStorage.setItem(stationData[0][v], stationData[station][v]); // Line producing error
            console.log("Setting sessionStorage:" + stationData[0][v] + " to \"" + stationData[station][v] + "\"");
        }
    }, 2000);
});
When I ran this on my local XAMPP Apache server, I was getting the same TypeError until I added the setTimeout() to space things out a bit, figuring it was trying to set the storage items before the array had finished loading. That worked: just a 10ms timeout is enough on my local server, which is why it's there. However, when I upload this to our live server, the TypeError comes back. I've tried increasing the timeout in increments up to 10000ms, but it's still happening. I've googled around with no luck, so I'm hoping someone here might be able to share some insight!
It's throwing a TypeError because you are trying to read the value of an asynchronous operation before it is returned by the server.
Increasing the timeout is not the solution.
You just need to refactor your code slightly, like this:
$(document).ready(function() {
    function fetchCSVFromServer(successCb, errorCb) {
        return $.ajax({
            type: "GET",
            url: "document.csv",
            dataType: "text"
        })
        .done(successCb)
        .fail(errorCb);
    }

    var successCb = function(response){
        // Parse array into sessionStorage items
        var stationData = $.csv.toArrays(response);
        console.log(stationData); // Confirms the array in the console
        console.log("Station Variables Success!");

        // No setTimeout is needed here: this code only runs once the response has arrived
        var station = getUrlParameter('stn');
        var varLen = stationData[0].length;
        for (let v = 0; v < varLen; v++) {
            sessionStorage.setItem(stationData[0][v], stationData[station][v]);
            console.log("Setting sessionStorage: " + stationData[0][v] + " to \"" + stationData[station][v] + "\"");
        }
    };

    var errorCb = function(jqXHR, textStatus, errorThrown) {
        console.error(jqXHR);
    };

    fetchCSVFromServer(successCb, errorCb);
});
This code has two parts: first, making the XHR with $.ajax in the fetchCSVFromServer function; second, synchronizing the code that follows the asynchronous XHR call, which now lives in the successCb callback.
The trick is to run the dependent code only after the server has sent its response, via the .done callback.
This is a well-known and solved problem; for more details, refer to this question.
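For reference, here is a minimal sketch of the same flow using async/await instead of named callbacks. This assumes jQuery 3+ (whose jqXHR objects are Promise-compatible and can be awaited) and the same getUrlParameter helper from the question:

$(document).ready(async function() {
    try {
        // await the ajax call; the code below only runs once the CSV has arrived
        const response = await $.ajax({ type: "GET", url: "document.csv", dataType: "text" });
        const stationData = $.csv.toArrays(response);
        const station = getUrlParameter('stn');
        for (let v = 0; v < stationData[0].length; v++) {
            sessionStorage.setItem(stationData[0][v], stationData[station][v]);
        }
    } catch (jqXHR) {
        console.error(jqXHR);
    }
});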
Related
I want to get some data about places using the Google Places API.
The thing is, I want to get data for more than 1000 records, per city of the region I'm looking at.
I'm searching for pizzerias, and I want all the pizzerias in the region I've defined. So I have an array like this:
['Pizzeria+Paris','Pizzeria+Marseille','Pizzeria+Nice','Pizzeria+Toulouse']
My objective is to make a single request, wait 3 seconds (or more), and then process the next request. I'm using the Lodash library to help me iterate.
Here is my code:
function formatDetails(artisan){
    var latitude = artisan.geometry.location.lat;
    var longitude = artisan.geometry.location.lng;
    var icon = artisan.icon;
    var id = artisan.id;
    var name = artisan.name;
    var place_id = artisan.place_id;
    var reference = artisan.reference;
    var types = artisan.types.toString();
    $('#details').append('<tr>'+
        '<td>'+latitude+'</td>'+
        '<td>'+longitude+'</td>'+
        '<td>'+icon+'</td>'+
        '<td>'+id+'</td>'+
        '<td>'+name+'</td>'+
        '<td>'+place_id+'</td>'+
        '<td>'+reference+'</td>'+
        '<td>'+types+'</td>'+
        '</tr>');
}
var getData = function(query, value){
    $.ajax({
        url: query,
        type: "GET",
        crossDomain: true,
        dataType: "json",
        success: function(response) {
            var artisan = response.results;
            console.log(artisan);
            for (var i = 0; i < artisan.length; i++){
                formatDetails(artisan[i]);
                setTimeout(function(){ console.log('waiting1'); }, 3000);
            }
            setTimeout(function(){ console.log('waiting2'); }, 3000);
        },
        error: function(xhr, status) {
            console.log(status);
        },
        async: false
    });
}
$(document).ready(function(){
    var places = ['Pizzeria+Paris','Pizzeria+Marseille','Pizzeria+Nice','Pizzeria+Toulouse'];
    _.forEach(places, function(value, key) {
        var proxy = 'https://cors-anywhere.herokuapp.com/';
        var target_url = 'https://maps.googleapis.com/maps/api/place/textsearch/json?query='+value+'&key=AIzaSyAClTjhWq7aFGKHmUwxlNUVBzFpIKTkOrA';
        var query = proxy + target_url;
        getData(query, value);
    });
});
I've tried a lot of solutions I found on Stack Overflow, but none of them worked, or I might have done them wrong.
Thanks for your help!
The fact that $.ajax returns a Promise makes this quite simple.
Firstly, you want getData to return the $.ajax call, and also get rid of async: false:
var getData = function(query, value) {
    return $.ajax({
        url: query,
        type: "GET",
        crossDomain: true,
        dataType: "json",
        success: function(response) {
            var artisan = response.results;
            for (var i = 0; i < artisan.length; i++){
                formatDetails(artisan[i]);
            }
        },
        error: function(xhr, status) {
            console.log(status);
        }
    });
}
Then you can use Array.reduce to iterate through the array and chain the requests together, with a 3-second "delay" after each request.
Like so:
$(document).ready(function(){
    var places = ['Pizzeria+Paris','Pizzeria+Marseille','Pizzeria+Nice','Pizzeria+Toulouse'];
    places.reduce((promise, value) => {
        var proxy = 'https://cors-anywhere.herokuapp.com/';
        var target_url = 'https://maps.googleapis.com/maps/api/place/textsearch/json?query='+value+'&key=AIzaSyAClTjhWq7aFGKHmUwxlNUVBzFpIKTkOrA';
        var query = proxy + target_url;
        return promise.then(() => getData(query, value))
            // return a promise that resolves after three seconds
            .then(response => new Promise(resolve => setTimeout(resolve, 3000)));
    }, Promise.resolve()) /* start reduce with a resolved promise to start the chain */
    .then(results => {
        // all results available here
    });
});
The most effective answer is the one above from #jaromandaX.
Nevertheless, I also found a workaround in Google Chrome that lets you avoid getting your hands dirty with promises.
On Chrome:
1. Open the console.
2. Go to the Network tab.
3. Near the "Preserve log" and "Disable cache" options, there is a dropdown with an arrow labeled "No throttling".
4. Click the arrow next to the label, then Add.
5. You can then set a download speed, an upload speed and, most importantly, a delay between each request.
Kaboom: it works with my initial code.
Nevertheless, I changed my code to follow the answer above, which is the better approach in terms of code quality, speed, etc.
Thanks
I've read a good bit about callbacks, and while I use them for click events and similar, I'm using them without fully understanding them.
I have a simple web app with 3 or 4 html pages, each with its own js page.
I have some global functions that I've placed in a new js page which is referenced by each html page that needs it. I'm using this file, word_background.js, to hold functions that are lengthy and used by multiple pages.
pullLibrary is a function, residing in word_background.js, that pulls from my db and processes the results.
I want to call pullLibrary from webpageOne.html, make sure it completes, then do more processing in webpageOne.js.
In webpageOne.js I have the following - trying to call pullLibrary and, once it is complete, use the results for further work in webpageOne.js.
The code executes pullLibrary (in word_background.js) but doesn't "return" to webpageOne.js to continue processing.
I'm assuming I'm missing some critical, essential aspect to callbacks...
I just want to run the pullLibrary function (which has ajax calls etc) and, once it is complete, continue with my page setup.
Any explanation/correction appreciated.
This code is in webpageOne.js:
pullLibrary(function(){
    console.log('Now processing library...');
    processLibrary();
    updateArrays();
    //Do a bunch more stuff
});
----- UPDATE -----
Thank you for the comments... which I think illuminate my broken mental model of how this should work.
pullLibrary is an ajax function: it pulls from a database and stuffs the results into an array and localStorage.
My expectation is that I can call pullLibrary and, when it is complete, the callback code (in this case an anonymous function) will run.
function pullLibrary(){ //Values passed from startup() if no data is local
    //Pull data from database and create basic LIBRARY array for further processing in processLibrary sub
    console.log("Starting to pull library array in background.js..." + "User: " + localStorage.userID + " License: " + localStorage.licType);

    var url1 = baseURL + 'accessComments3.php';
    var url2 = '&UserID=' + localStorage.userID + '&LicType=' + localStorage.licType;

    //Need global index to produce unique IDs
    var idIndex = 0;
    var index = 0;

    $.ajax({
        type: "POST",
        url: url1,
        data: url2,
        // dataType: 'text',
        dataType: 'json',
        success: function(result){
        // success: function(responseJSON){
            arrLibrary = result; //store for use on this page
            localStorage.library = JSON.stringify(result); //Store for use elsewhere
            console.log('Saving to global variable: ') + console.log(arrLibrary);

            //Now mark last update to both sync storage and local storage so access from other browsers will know to pull data from server or just use local arrays (to save resources)
            var timeStamp = Date.now();
            var temp = {};
            temp['lastSave'] = timeStamp;
            // chrome.storage.sync.set(temp, function() {
            console.log('Settings saved');
            localStorage.lastSync = timeStamp;
            console.log('Last update: ' + localStorage.lastSync);

            //Store Group List
            var arrComGroups = $.map(arrLibrary, function(g){ return g.commentGroup });
            // console.log('List of comment groups array: ') + console.log(arrComGroups);
            arrComGroups = jQuery.unique(arrComGroups); //remove dupes
            // console.log('Unique comment groups array: ') + console.log(arrComGroups);
            localStorage.groupList = JSON.stringify(arrComGroups); //Store list of Comment Groups

            //Create individual arrays for each Comment Group
            $.each(arrComGroups, function(i, gName){ //Cycle through each group of Comments
                var arrTempGroup = []; //to hold an array for one comment group
                arrTempGroup = $.grep(arrLibrary, function (row, i){
                    return row.commentGroup == gName;
                });
                //Store string version of each Comment Array
                window.localStorage['group_' + gName] = JSON.stringify(arrTempGroup);
                console.log('Creating context menu GROUPS: ' + gName);
            });
            // processLibrary(arrLibrary); //We've pulled the array with all comments - now hand off to processor
        }, //End Success
        error: function(xhr, status, error) {
            alert("Unable to load your library from 11trees' server. Check your internet connection?");
            // var err = eval("(" + xhr.responseText + ")");
            // console.log('Error message: ' + err.Message);
        }
    }); //End ajax
}
Okay, there are tons of "here's how callbacks work" posts all over the internet... but I could never find a crystal-clear example for the simplest of cases.
Is the following accurate?
We have two javascript files, one.js and two.js.
In one.js we have a function, let's call it apple(), that includes an Ajax call.
two.js does a lot of processing and listening to a particular html page. It needs data from the apple() ajax call. Other pages are going to use apple(), also, so we don't want to just put it in two.js.
Here's how I now understand callbacks:
one.js:
function apple(callback_function_name){
    $.ajax({
        type: "POST",
        url: url1,
        data: url2,
        dataType: 'json',
        success: function(result){
            //apple processing of result
            callback_function_name(); //This is the important part - whatever function was passed from two.js
        }, //End Success
        error: function(xhr, status, error) {
        }
    }); //End ajax
} //End apple function
** two.js **
This js file has all kinds of listeners etc.
$(document).ready(function () {
    apple(function(apple_callback){
        //all kinds of stuff that depends on the ajax call completing
        //note that we've passed "apple_callback" as the function callback name...which is stored in the apple function as "callback_function_name".
        //When the ajax call is successful, the callback - in this case, the function in two.js - will be called back...and the additional code will run
        //So the function apple can be called by all sorts of other functions...as long as they include a function name that is passed. Like apple(anothercallback){} and apple(thirdcallback){}
    }); //End apple function
}); //End Document.Ready
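Applying that same pattern to the original pullLibrary is a small change: accept the callback parameter and invoke it at the end of the success handler. Here is a sketch of just the relevant parts; everything else in the function stays as it was:

function pullLibrary(callback){
    // url1, url2, etc. are built exactly as before
    $.ajax({
        type: "POST",
        url: url1,
        data: url2,
        dataType: 'json',
        success: function(result){
            // ...all of the existing processing of result into arrLibrary / localStorage...
            if (typeof callback === 'function') {
                callback(); // hand control back to webpageOne.js now that the data is ready
            }
        },
        error: function(xhr, status, error) {
            alert("Unable to load your library from 11trees' server. Check your internet connection?");
        }
    });
}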
Updated Question with Code
I have a situation where I am calling two nested ajax calls, one after the other. The first ajax call submits a form without the attachment. The result of the first ajax call creates a requestId, and with the second ajax call I attach multiple attachments to the created requestId.
With the code below, both the first and second ajax calls are made N times, once per attachment. For example, if there are 3 attachments, the createRequestId ajax call (the first ajax call) is made 3 times, which creates 3 requestIds. My issue is that the createRequestId ajax call needs to be made only once (the first time), and during the rest of the loop only the second ajax call should be made. How can I achieve this in the code below?
Current situation:
RequestId 1, Attachment 1
RequestId 2, Attachment 2
RequestId 3, Attachment 3
Expected output:
RequestId 1, Attachment 1, Attachment 2, Attachment 3
//loop through number of attachments in the form
$("#myDiv").find("input[type=file]").each(function(index, obj) {
    var fObj = $(obj),
        fName = fObj.attr("name"),
        fileDetail = document.getElementById(fName).files[0];

    //FileSize Validation
    if (fileDetail != undefined && fileDetail != null) {
        if (fileDetail.size > 5*Math.pow(1024,2)) {
            alert("Please upload the attachment which is less than 5 MB");
            return false;
        }
    }

    $.ajax({ //First Ajax Call
        url: 'http://..../createRequestId',
        type: 'POST',
        data: stringify(formData),
        success: function(resObj){
            $("#showResponseArea span").removeClass("hide");
            $("#showResponseArea span").removeClass("alert-success");
            var requestId = resObj.requestId;
            if (requestId > 1 && fileDetail != undefined && fileDetail != null) {
                $.ajax({ //Second Ajax Call
                    url: 'http://..../doAttach?fileName=' + fileDetail.name +
                         '&requestId=' + requestId,
                    type: 'POST',
                    data: fileDetail,
                    success: function(resObj){
                        alert("Attachment Successful");
                    },
                    error: function(data) {
                        alert("Failed with the attachment");
                    }
                });
            }
        },
        error: function(resObj) {
            alert("Some Error Occured");
        }
    });
});
I know this doesn't really answer your question in full, but if you don't mind, I'd like to offer a little constructive code review. It's hard to manage and debug code when it's all thrown into one big function with many lines, especially when you're nesting async calls (you're getting close to nested callback hell). There's a reason code like this gets hard to maintain and confusing.
Let's incorporate some Clean Code concepts and break these out into smaller named functions for readability, testability, and maintainability (and to make debugging easier).
First, you don't need all those != undefined and != null checks. Just do:
if (fileDetail)
and
if(requestId>1 && fileDetail)
That checks for both null and undefined on fileDetail.
Then I'd break those two ajax calls out into several named functions and let the function names and their signatures convey what they actually do. Once you do that, you can remove unnecessary comments, you'll typically find repeated code that can be removed (such as the redundant post() code), and the code you extracted becomes testable.
I tend to look for behavior in my code that I can extract out first. Each of those if statements could easily be extracted into its own named function, because an if statement in code usually translates to "behavior". And as you know, behavior can be isolated into its own module, method, class, whatever...
So, for example, that first if statement you had could be extracted into its own function. Notice I got rid of the extra nested if here too:
function validateFileSize(fileDetail){
    if (fileDetail && fileDetail.size > 5*Math.pow(1024,2)) {
        alert("Please upload the attachment which is less than 5 MB");
        return false;
    }
    return true;
}
So here's how you could start to break things out a little more cleanly (this could probably be improved even more, but it's at least a start):
$("#myDiv").find("input[type=file]").each(function(index,obj) {
var fObj = $(obj),
fileName = fObj.attr("name"),
file = document.getElementById(fileName).files[0];
validateFileSize(file);
post(file, 'http://..../createRequestId');
});
// guess what: if I wanted, I could now slap a test on this behavior since it's been extracted out to its own function
function validateFileSize(file){
    if (file && file.size > 5*Math.pow(1024,2)) {
        alert("Please upload the attachment which is less than 5 MB");
        return false;
    }
    return true;
}
function post(url, data, file){
    $.ajax({
        url: url,
        type: 'POST',
        data: stringify(data),
        success: function(res){
            showSuccess();
            var id = res.requestId;
            if (id > 1 && file) {
                var attachUrl = 'http://..../doAttach?fileName=' + file.name + '&requestId=' + id;
                postData(file, attachUrl);
            }
        },
        error: function(err) {
            alert("Some Error Occurred: " + err);
        }
    });
}
// I didn't finish this, and am repeating some stuff here; you could really refactor this into just one post() method and get rid of some of this duplication
function postData(file, url){
    $.ajax({
        url: url,
        type: 'POST',
        data: file,
        success: function(res){
            alert("Attachment Successful");
        },
        error: function(data) {
            alert("Failed with the attachment");
        }
    });
}

// this is behavior specific to your form; break stuff like this out into its own function...
function showSuccess() {
    $("#showResponseArea span").removeClass("hide");
    $("#showResponseArea span").removeClass("alert-success");
}
I'll leave it here. Next, you could get rid of the duplicate $.ajax() code by creating a generic post() utility method that can be reused, and move any other behavior out of those methods into functions of their own, so you can reuse the jQuery ajax call syntax.
Then eventually try to incorporate promises (or promises + generators) to chain those async calls, which might make things a little easier to maintain and debug. :)
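As a rough illustration of that direction, here is a sketch that chains the calls on the promise-like object $.ajax returns. The names createRequest, attachFile, and the files array are illustrative, not part of the original code, and JSON.stringify stands in for the stringify helper used above:

function createRequest(formData) {
    return $.ajax({ url: 'http://..../createRequestId', type: 'POST', data: JSON.stringify(formData) });
}

function attachFile(requestId, file) {
    return $.ajax({
        url: 'http://..../doAttach?fileName=' + file.name + '&requestId=' + requestId,
        type: 'POST',
        data: file
    });
}

createRequest(formData)
    .then(function(res) {
        // one requestId, then attach every file against it
        var uploads = files.map(function(file) {
            return attachFile(res.requestId, file);
        });
        return $.when.apply($, uploads); // resolves once all attachments finish
    })
    .then(function() {
        alert("Attachment Successful");
    })
    .fail(function(err) {
        alert("Some Error Occurred");
    });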
I think your loop is simply in the wrong place. As it is, you're iterating over the files and making both AJAX calls once per file.
Edit: I now show the appropriate place to do extra validations before the first AJAX call. The actual validation was not part of the question and is not included, but you can refer to JavaScript file upload size validation.
var fileSizesValid = true;
$("#myDiv").find("input[type=file]").each(function(index, obj) {
    // First loop checks file size, and if any file is > 5MB, set fileSizesValid to false
});

if (fileSizesValid) {
    $.ajax({ //First Ajax Call
        url: 'http://..../createRequestId',
        type: 'POST',
        data: stringify(formData),
        success: function(resObj) {
            //loop through the attachments in the form
            $("#myDiv").find("input[type=file]").each(function(index, obj) {
                var fObj = $(obj),
                    fName = fObj.attr("name"),
                    fileDetail = document.getElementById(fName).files[0];
                $("#showResponseArea span").removeClass("hide");
                $("#showResponseArea span").removeClass("alert-success");
                var requestId = resObj.requestId;
                if (requestId > 1 && fileDetail != undefined && fileDetail != null) {
                    $.ajax({ //Second Ajax Call
                        url: 'http://..../doAttach?fileName=' + fileDetail.name +
                             '&requestId=' + requestId,
                        type: 'POST',
                        data: fileDetail,
                        success: function(resObj) {
                            alert("Attachment Successful");
                        },
                        error: function(data) {
                            alert("Failed with the attachment");
                        }
                    });
                }
            });
        },
        error: function(resObj) {
            alert("Some Error Occured");
        }
    });
}
As a side note, take care where you place your braces. In JavaScript, an opening brace should go at the end of the line, not at the start of the next one. This is not purely a style preference as it is in most languages; in some cases it is an actual requirement, thanks to automatic semicolon insertion.
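The classic example is a return statement whose object literal starts on the next line; automatic semicolon insertion terminates the return before the brace is ever reached:

function broken() {
    return          // ASI inserts a semicolon here, so this returns undefined
    {
        answer: 42
    };
}

function works() {
    return {        // opening brace on the same line: the object literal is returned
        answer: 42
    };
}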
Try the following code (just a rearrangement of your code, nothing new):
//loop through number of attachments in the form
var requestId;
$("#myDiv").find("input[type=file]").each(function(index, obj) {
    var fObj = $(obj),
        fName = fObj.attr("name"),
        fileDetail = document.getElementById(fName).files[0];

    //FileSize Validation
    if (fileDetail != undefined && fileDetail != null) {
        if (fileDetail.size > 5*Math.pow(1024,2)) {
            alert("Please upload the attachment which is less than 5 MB");
            return false;
        } else if (!requestId || requestId <= 1) {
            $.ajax({ //First Ajax Call
                url: 'http://..../createRequestId',
                type: 'POST',
                data: stringify(formData),
                success: function(resObj){
                    $("#showResponseArea span").removeClass("hide");
                    $("#showResponseArea span").removeClass("alert-success");
                    requestId = resObj.requestId;
                    secondAjaxCall(fileDetail);
                },
                error: function(resObj) {
                    alert("Some Error Occured");
                }
            });
        } else if (requestId > 1) {
            secondAjaxCall(fileDetail);
        }
    }
});

function secondAjaxCall(fileDetail) {
    $.ajax({ //Second Ajax Call
        url: 'http://..../doAttach?fileName=' + fileDetail.name +
             '&requestId=' + requestId,
        type: 'POST',
        data: fileDetail,
        success: function(resObj){
            alert("Attachment Successful");
        },
        error: function(data) {
            alert("Failed with the attachment");
        }
    });
}
I am getting different errors in FF, Chrome, and IE, but it all boils down to an error with the data in $.ajax. The code follows. Please go easy on me if I made a dumb mistake; I have spent hours researching this and can't figure it out. Any help is appreciated.
Edited to include the error messages
FF Error message: NS_ERROR_XPC_BAD_CONVERT_JS: Could not convert JavaScript argument
Chrome Error message:Uncaught TypeError: Illegal invocation
IE9 Error message: SCRIPT65535: Argument not optional
Here is the code
mc.mc_data.click_tracking = [];
var sequence = 0;
var send_it;

// the container click event will record even extraneous clicks. need to change it to extending the jquery on click handler
$('#container').on('click', function(event) {
    logClicks(event);
    if (!send_it) {
        sendIt();
    }
    sequence++;
});

function sendIt(){
    var tracking = mc.mc_data.click_tracking;
    var url = '/ajax/click_trackin';
    console.log("clicks[" + sequence + "] " + $.isArray(tracking));
    $.each(tracking, function(i, v){
        console.log(i + v.innerText + " - " + v.sequence);
    });
    send_it = window.setInterval(function(){
        $.ajax({
            type: 'POST',
            url: url,
            data: {
                clicks: tracking
            },
            success: function(response) {
                if (response.result.length < 1) {
                    console.log(response + ': no response');
                } else {
                    console.log(response);
                    tracking = mc.mc_data.click_tracks = [];
                }
                mc.mc_data.click_tracks = [];
                clearInterval(send_it);
                sendIt();
            },
            error: function(a, b, c){
                console.log(a + " - " + b + " - " + c);
                clearInterval(send_it);
            }
        });
    }, 5000);
}

//
function logClicks(e){
    var temp_click = {
        'business_id': window.mc.businessid,
        'userid': window.mc.userid,
        'timestamp': e.timeStamp,
        'leg': window.mc.currentLeg,
        'workflow': 'dummy data',
        'sequence': sequence,
        'type': e.type,
        'target': e.target,
        'parent': e.target.parentElement,
        'id': e.target.id,
        'class': e.className,
        'innerText': $(e.target).text()
    };
    mc.mc_data.click_tracking.push(temp_click);
}
For data, you are meant to pass an object which will later be converted into a query string. You are passing the variable tracking, which contains things like e.target.parentElement, which is a DOM node with a great many further properties (including other DOM nodes!). The error can originate either from problems converting a DOM node into a query string, or from creating a query string that is far too long. It would not make much sense to send a DOM node to the server anyway.
Only send what is necessary and can be reasonably converted to a query string.
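For example, here is a sketch of logClicks that stores only serializable values; using tagName strings in place of the DOM nodes is one reasonable choice, not the only one:

function logClicks(e){
    var temp_click = {
        'business_id': window.mc.businessid,
        'userid': window.mc.userid,
        'timestamp': e.timeStamp,
        'leg': window.mc.currentLeg,
        'workflow': 'dummy data',
        'sequence': sequence,
        'type': e.type,
        'target': e.target.tagName, // a string instead of the DOM node itself
        'parent': e.target.parentElement ? e.target.parentElement.tagName : null,
        'id': e.target.id,
        'class': e.target.className,
        'innerText': $(e.target).text()
    };
    mc.mc_data.click_tracking.push(temp_click);
}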
I'm looping through an array, and during each iteration of the loop I'm calling a URL through ajax. I'd also like to update an element's .innerHTML so the user is kept informed about which iteration of the loop is being processed. However, the .innerHTML update is only displayed once the script completes.
How can I make this notification display during my loop?
I'm also using the jQuery ajax setting 'async: false'. I don't want to hammer my server by processing all of the ajax requests at once, as they encode video files, which is CPU intensive. I don't really want to lock the browser up waiting for synchronous requests to complete either.
Is there a better way to do this?
My ultimate goal is to sequentially execute my combine.php script for each set of videos, while displaying a current status indicator to the user, and while not locking the browser up in the process. Your help is appreciated!
Code snippet here:
// process the loop of videos to be combined
var status = document.getElementById('currentStatus');
for (i = 0; i < count; i++) {
    // change the display
    var fields = videos[i].split(":", 2);
    current = i + 1;
    currentStatus.innerHTML = "<b>Multi-part Videos:</b> <h3 class='status'>Currently Updating Bout #" + fields[1] + " (" + current + " of " + count + " videos)</h3>";

    // run the combine
    var dataString = 'videoId=' + fields[0];
    $.ajax({
        type: "POST",
        url: "combine.php",
        data: dataString,
        success: function(txt) {
            //deselect the checkbox
            document.combine.video[selected[i]].checked = false;
        },
        async: false
    });
}
async: false will hang the entire browser until the ajax request completes. That is why you don't see the page update on each loop iteration.
Synchronous ajax requests typically make for terrible UX (do you like the page to freeze inexplicably?) and there is almost always a better way to do it. Since you're using jQuery, the Deferred Object API makes this easy.
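For instance, here is a rough sketch of how the same work could be chained on the promise that $.ajax returns, so the requests run one at a time without blocking the UI. combineVideo is an illustrative helper name, and videos, count, selected, and currentStatus are the same variables used in the question:

function combineVideo(i) {
    var fields = videos[i].split(":", 2);
    currentStatus.innerHTML = "Currently updating " + (i + 1) + " of " + count + " videos";
    return $.ajax({
        type: "POST",
        url: "combine.php",
        data: 'videoId=' + fields[0]
    }).done(function() {
        document.combine.video[selected[i]].checked = false; // deselect the checkbox
    });
}

// chain the requests so each one starts only after the previous one finishes
var chain = $.Deferred().resolve();
for (var i = 0; i < count; i++) {
    (function(i) {
        chain = chain.then(function() {
            return combineVideo(i);
        });
    })(i);
}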
As others have alluded to, your problem is caused by JavaScript being single-threaded: while the single JS thread is waiting for your ajax request to return, it is not allowed to update the UI.
You can get around this by changing the request to async, and using the callback to trigger the request for the next object:
// trigger the loop of videos to be combined
var status = document.getElementById('currentStatus');
processVideo(0);

function processVideo(index) {
    var fields = videos[index].split(":", 2);
    currentStatus.innerHTML = "<b>Multi-part Videos:</b> <h3 class='status'>Currently Updating Bout #" + fields[1] + " (" + current + " of " + count + " videos)</h3>";

    // run the combine
    var dataString = 'videoId=' + fields[0];
    $.ajax({
        type: "POST",
        url: "combine.php",
        data: dataString,
        success: function() {
            processResponse(index);
        },
        async: true
    });
}

function processResponse(index) {
    // this method is called each time the ajax request finishes
    if (index++ < count) {
        //deselect the checkbox
        document.combine.video[selected[index]].checked = false;
        processVideo(index);
    }
}
If you want to update one by one while async is set to true, the next request can be issued from the success callback function. The status-update code should go inside that callback too.
function ajaxRequest(i){
    // other processing
    // .............
    $.ajax({
        type: "POST",
        url: "combine.php",
        data: dataString,
        success: function(txt) {
            //deselect the checkbox
            document.combine.video[selected[i]].checked = false;
            // update status
            currentStatus.innerHTML = .....
            // make next request
            if (i < lastOne) {
                ajaxRequest(i + 1);
            }
        },
        async: true
    });
}
}