var voted;
$.getJSON("http://smart-ip.net/geoip-json?callback=?", function(data){
voted = data.host;
console.log(voted);
});
console.log(voted);
So voted is undefined outside; however, it's defined properly inside the function.
I'm wondering how I can use data.value outside the function. The only thing I can think of is using a global variable, but that doesn't work as expected.
Edit: Need the IP outside of functions
$.getJSON("http://smart-ip.net/geoip-json?callback=?", function(data){
voted(data.host);
});
function voted(ip) {
ip_voted = ip_array.indexOf(ip);
if (ip_voted > -1) {
window.location.href = "results";
}
}
if (choice && ip == '123123123') {
}
You can do it this way:
$.getJSON("http://smart-ip.net/geoip-json?callback=?", function(data){
writeVoted(data.host);
});
function writeVoted(voted) {
console.log(voted);
}
Or make the call synchronous (which I believe is not advisable). From the jQuery $.ajax documentation:
async (default: true)
Type: Boolean
By default, all requests are sent asynchronously (i.e. this is set to true by default). If you need synchronous requests, set this option to false. Cross-domain requests and dataType: "jsonp" requests do not support synchronous operation. Note that synchronous requests may temporarily lock the browser, disabling any actions while the request is active. As of jQuery 1.8, the use of async: false with jqXHR ($.Deferred) is deprecated; you must use the success/error/complete callback options instead of the corresponding methods of the jqXHR object such as jqXHR.done() or the deprecated jqXHR.success().
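For a plain same-origin request (not the JSONP call above, which cannot be synchronous) the option would look roughly like this - a minimal sketch with a hypothetical endpoint, shown only for completeness since it freezes the browser while waiting:
// Minimal sketch of a synchronous request against a hypothetical same-origin URL.
var voted;
$.ajax({
    url: "/geoip-json",   // placeholder endpoint - JSONP cannot be used here
    dataType: "json",
    async: false,         // block until the response has arrived
    success: function (data) {
        voted = data.host;
    }
});
console.log(voted); // defined at this point, because the call blocked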
The reason it doesn't work out for you is that the call to getJSON is an asynchronous call. In essence, that means it is lifted out of the current flow and executed independently, some time later. Take a look at the comments I have added to your code below:
var voted;
//getJSON is executed right away - async
$.getJSON("http://smart-ip.net/geoip-json?callback=?", function(data){
//This callback function executes when the remote request responds. The timeframe is variable.
voted = data.host;
console.log(voted);
});
//Gets executed right away - doesn't wait for getJSON to get the remote response.
//voted is probably still undefined.
console.log(voted);
What you need to do is add a function that continues your flow, as shown below:
$.getJSON("http://smart-ip.net/geoip-json?callback=?", function(data){
voted(data.host);
});
function voted(ip) {
ip_voted = ip_array.indexOf(ip);
if (ip_voted > -1) {
window.location.href = "results";
}
verify(ip);
}
function verify(ip) {
if (choice && ip == '123123123') {
}
}
If you really need to use the data in a global variable, you might be tempted to "wait" for the response - I strongly advise against doing this, and in JavaScript it does not even work:
var host, value;
var responseArrived = false;
$.getJSON("http://smart-ip.net/geoip-json?callback=?", function(data){
host = data.host;
value = data.value;
responseArrived = true;
});
while( !responseArrived ) {
//NOT A GOOD IDEA!
//JavaScript is single-threaded, so the getJSON callback can never run while
//this loop is spinning - the loop would simply block the page forever.
//NOT A GOOD IDEA!
}
console.log(host);
console.log(value);
Related
I am having problems understanding how I can wait for the results of multiple functions that contain ajax calls.
I tried using Promise.all() and $.when().done(). These only call another function when they are done, and I would like to avoid that approach because it will infinitely complicate my code.
I also don't want to use async: false in the ajax call.
The main function doesn't even have to look like that.
I only need to know if there is a way to call one or more functions that contain ajax and then wait for their results without continuing in another function.
function main(){
//some functions
UploadFile('input_1');
UploadFile('input_2');
.
.
.
UploadFile('input_n');
//Here is where I want to know all the results of the UploadFile functions
//some other functions
//return true or false depending on //some functions, UploadFile AND //some other functions
}
function UploadFile(inputId){
return $.ajax({
//ajax parameters
success: function(IDUpload) {
if (IDUpload > 0) {
return true;
}
return false;
},
error: function(error) {
return false;
}
});
}
Edit:
The main() function is the validation function for the form. It seems that if I make it async, it will not fire at all, and it will not wait for the UploadFile calls.
You can use the await prefix to have an asynchronous result resolved in a synchronous-looking way: it suspends the function until the promise settles with either a value or an error.
To use the await prefix, you need to declare the containing function with async, so the runtime knows the function may await a promise.
Further Information can be read in the MDN Documentation:
Await Documentation
// main is declared as an async function so that await could be used inside if needed
async function main(e){
// Added to prevent the submit from propagating to the action URL
e.preventDefault();
e.stopPropagation();
// As UploadFile below now uses a synchronous Ajax call, we can simply use the value
// it returns.
let res = UploadFile('input_1');
console.log(res);
// ... Other code
}
// We use the async: false property of the jQuery Ajax call, thus getting the response as the
// return value.
function UploadFile(inputId){
let res = $.ajax({
method: "GET",
async: false,
url:"https://www.random.org/integers/?num=1&min=-10&max=10&col=1&base=10&format=plain&rnd=new",
});
// As the Ajax call is now synchronous we have to check ourselves whether the request was
// successful. For this, we check the response status. Any status between 200 and 299 is
// successful; any other status indicates an error, depending on the status code, but we won't
// bother much about which error was thrown.
if(res.status >= 300 || res.status < 200) {
console.error("Request Error:", res.statusText)
return false;
}
// With the guarding if-check above we can safely assume the request was successful and
// can now check with the same logic as in the success method before. I've inverted the
// check to stay consistent with the guarding if-statements.
// Same as: if (IDUpload <= 0)
if(res.responseText <= 0) {
console.log("The Response is not 'valid'");
return false;
}
console.log("The Response is 'valid'");
return true;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
<form onsubmit="main(event)">
<button type="submit">Submit Form</button>
</form>
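For comparison, the await route described at the top of this answer could look roughly like this - just a sketch, assuming UploadFile simply returns the promise (jqXHR) from $.ajax instead of forcing it to be synchronous:
// Sketch: UploadFile hands back the jqXHR promise instead of using async: false.
function UploadFile(inputId) {
    return $.ajax({
        method: "GET",
        url: "https://www.random.org/integers/?num=1&min=-10&max=10&col=1&base=10&format=plain&rnd=new"
    });
}

async function main(e) {
    e.preventDefault();
    try {
        // await suspends main() here until the request settles,
        // without blocking the rest of the page.
        let responseText = await UploadFile('input_1');
        console.log(responseText >= 0 ? "The Response is 'valid'" : "The Response is not 'valid'");
        // For several uploads at once:
        // let results = await Promise.all([UploadFile('input_1'), UploadFile('input_2')]);
    } catch (err) {
        console.error("Request Error:", err.statusText);
    }
}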
How do I execute three services in a synchronous way in AngularJS? I have three $http.get() services; on each success I have to read the JSON fields, and if a particular set of fields has valid data, a flag has to be set to true/false. Depending on the flag result the next service will be called or not. But here the services run asynchronously, so my logic fails.
Sample Code:
// Condition 1
if(item === false) {
var product = Service1.get().then(function (response) {
// Logic for reading the JSON
// Setting the flag based on it..
item = true/false;
});
}
// Condition 2
if(item === false) {
var call = Service2.get().then(function (data) {
// Logic for reading the JSON
// Setting the flag based on it..
item = true/false;
});
}
// Condition 3
if(item === false) {
var product = Service3.get().then(function (response) {
// Logic for reading the JSON
// Setting the flag based on it..
item = true/false;
});
}
Here, the problem is that the code in *Condition3* gets executed before the callbacks in *Condition1* and *Condition2* have completed, which causes the unexpected results.
It would be of great help if someone has sample code in which three services are executed sequentially.
Instead of executing a new $http request in each success handler and writing cascades of requests, perhaps you can solve it recursively:
function recursiveHttp(array) {
if (!array.length) return; //all requests done
$http(array[0].config).then(function() {
array[0].success();
recursiveHttp(array.slice(1)); //continue with the remaining items
}, function() {
array[0].error();
recursiveHttp(array); //beware: no escape clause - the same item is retried
});
}
Where the array is a collection of objects that contain the required config object and two callback functions.
{
config : {
method: 'get',
url: 'myurl'
},
success: function() {
//do stuff
},
error: function() {
//do stuff
}
}
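A hypothetical call for the three services from the question might then look like this (the URLs and callback bodies are placeholders):
recursiveHttp([
    {
        config: { method: 'GET', url: '/api/service1' },
        success: function() { /* read the JSON, set the flag */ },
        error: function() { /* handle the failure */ }
    },
    {
        config: { method: 'GET', url: '/api/service2' },
        success: function() { /* read the JSON, set the flag */ },
        error: function() { /* handle the failure */ }
    },
    {
        config: { method: 'GET', url: '/api/service3' },
        success: function() { /* read the JSON, set the flag */ },
        error: function() { /* handle the failure */ }
    }
]);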
There are 2 ways to achieve what you want (as far as I can make out from your question):
change the behavior of the request to synchronous by using async: false (not recommended, since it blocks the browser).
chain your requests properly so that one executes only after the other, so that each has its dependencies met before it starts executing. This can be done by calling the dependent function in the callback of the first one, as sketched below.
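A rough sketch of the second option using the promises returned by the services (isValid is a hypothetical helper standing in for the 'valid data' check from the question):
Service1.get().then(function (response1) {
    if (!isValid(response1.data)) return;   // flag says stop: break the chain here
    return Service2.get();
}).then(function (response2) {
    if (!response2 || !isValid(response2.data)) return;
    return Service3.get();
}).then(function (response3) {
    // all three calls have run (or been skipped) strictly in order
});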
Not sure if my question is subjective/objective, but as a JavaScript newbie I'm encountering this problem quite a lot. So here I go.
I'm used to writing C#, so my JavaScript is structured like C#, and I think that's exactly what is causing problems ;-)
Let's give a simple example where I met my problem again today:
MyLibrary.fn.InitAddEntityForm = function () {
$('a#btnAddEntity').click(function () {
//post data and receive object with guid and isPersisted boolean
var persistedObject = MyLibrary.fn.CheckAndSendAddEntityForm("name", "avatarurl.png");
console.log("test");
//check if persisted and go to next step
if (persistedObject.isPersisted) {
MyLibrary.fn.InitAddAnotherEntityForm(persistedObject.gdEntityId);
} else {
alert("Oops, something went wrong. Please call 911");
}
});
};
//////*****/////
//SOME FUNCTION THAT SENDS MY FORM AND RETURNS AN OBJECT WITH TRUE VALUE AND POSTED ENTITY ID
/////*****//////
MyLibrary.fn.CheckAndSendAddForm = function (txtName, ImageUrl) {
var postUrl = "/admin/add";
var persistedObject = new Object();
$.post(
postUrl,
{ Name: txtName, ImageUrl: txtImageUrl},
function (data) {
if (data.Status == 200) {
console.log("Post status:" + data.Message);
persistedObject.isPersisted = true;
persistedObject.gdEntityId = data.Data;
} else if (data.Status == 500) {
console.log("Failed to post entitiy");
} else {
console.log("Fault with Javascript");
}
}, "json"
);
return persistedObject;
};
Okay, that's it. Everything looks okay, right? The browser says no.
I tried to debug it using Firebug, stepping through my code line by line, and that way the browser does what I want: it executes a new function to show the next panel in my wizard.
After placing a lot of console.log() calls in my code I figured out that this must be something about timing in JavaScript. In C# the code executes line by line, but apparently JavaScript doesn't.
By placing that console.log("test") I noticed that "test" appeared in my console before "Post status: Success!".
So here's my question: how should I write my JavaScript code so that I have control over the order in which the browser executes it?
Should I really move the code below to the end of my CheckAndSendAddEntityForm()?
//check if persisted and go to next step
if (persistedObject.isPersisted) {
MyLibrary.fn.InitAddAnotherEntityForm(persistedObject.gdEntityId);
} else {
alert("fout");
}
Is this how I have to write JavaScript, one big domino effect, or am I just doing something wrong?
$.post is a shortcut for an AJAX call, and AJAX is by definition asynchronous, which means it won't wait for a response before continuing processing. If you switch it to a regular $.ajax() call, there is an async option you can set to false, which will make it behave as you are expecting.
Alternatively you can also define a function to execute on successful return of the AJAX request, in which you can call the next step in your process chain.
The AJAX call is asynchronous; that means that the callback method exposed by $.post will be executed when the request completes, but your JavaScript will continue executing as soon as the call to $.post returns. If you want to do something after the ajax call is done, you need to provide a callback method, for example:
MyLibrary.fn.CheckAndSendAddForm = function (txtName, ImageUrl, callback) {
var postUrl = "/admin/add";
var persistedObject = new Object();
$.post(
postUrl,
{ Name: txtName, ImageUrl: ImageUrl },
function (data) {
if (data.Status == 200) {
console.log("Post status:" + data.Message);
persistedObject.isPersisted = true;
persistedObject.gdEntityId = data.Data;
} else if (data.Status == 500) {
console.log("Failed to post entitiy");
} else {
console.log("Fault with Javascript");
}
callback(persistedObject); // This is where you return flow to your caller, passing the result along
}, "json"
);
};
Then you invoke it like so:
MyLibrary.fn.CheckAndSendAddForm("name", "avatarurl.png", function (persistedObject) {
console.log("test");
//check if persisted and go to next step
if (persistedObject.isPersisted) {
MyLibrary.fn.InitAddAnotherEntityForm(persistedObject.gdEntityId);
} else {
alert("Oops, something went wrong. Please call 911");
}
});
JavaScript is single-threaded. If you have asynchronous functionality, a simple boolean semaphore variable will help prevent invocations of a function while some process is still running.
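A minimal sketch of that boolean semaphore idea, with a placeholder URL and payload:
var busy = false; // the "semaphore"
function save() {
    if (busy) return;   // a request is already in flight, ignore this invocation
    busy = true;
    $.post("/admin/add", { Name: "name" }, function (data) {
        busy = false;   // release the flag once the response has arrived
        // continue the wizard here
    }, "json");
}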
If you want to execute asynchronous tasks one by one (like a domino line), you will need to use callback functions.
What you're encountering is the "asynchronous" bit of AJAX. If you want to process the data in the order the code physically appears (line by line in the JavaScript file), you can use the .success, .pipe or .done jQuery methods to add a callback that processes the data further. Don't embed your callbacks if you can help it, or you will get a "domino effect", as you call it.
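For example, with .done the flow from the question could be restructured roughly like this (a sketch that reuses the question's txtName/ImageUrl parameters; the next wizard step is only indicated by a comment):
$.post("/admin/add", { Name: txtName, ImageUrl: ImageUrl }, null, "json")
    .done(function (data) {
        // runs once the response has arrived - continue the wizard here
    })
    .fail(function (xhr) {
        alert("Oops, something went wrong.");
    });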
How do I make data overwrite the results variable?
var ajax = {
get : {
venues : function(search){
var results = "#";
$.getJSON("http://x.com/some.php?term="+search+"&callback=?",function(data){ results = data; });
return results;
}
}
};
data is overwriting results, just after results has been returned.
You can use the ajax function instead of getJSON, since getJSON is just shorthand for
$.ajax({
url: url,
dataType: 'json',
data: data,
success: callback
});
and then also set async to false, so that the call will block.
However, in your case this won't work, because JSONP requests (with "?callback=?") cannot be synchronous.
The other (better) option is to have whatever code is dependent on the results return value get called by the success callback.
So, instead of something like this:
var results = ajax.get.venues('search');
$('#results').html(translateResults(results));
Maybe something like this:
ajax.get.venues('search', function (results) {
$('#results').html(translateResults(results));
});
venues = function (search, callback) {
$.getJSON("http://x.com/some.php?term="+search+"&callback=?",
function(data){
callback(data);
});
};
Your problem is the asynchronous nature of JavaScript. results does get overwritten, but only later, after the function has already exited, because the callback is executed when the request has finished.
You would have to make the Ajax call synchronous using async: false (this is usually not a good idea, just mentioning it for completeness's sake) or restructure your code flow so it doesn't depend on the return value any more, but everything you need to do gets done in the callback function.
This isn't a scope problem. It's because $.getJSON is asynchronous; results is returned before $.getJSON finishes. Try making a callback for $.getJSON to call when it's done.
function JSON_handler(data){
// do stuff...
}
$.getJSON("http://x.com/some.php?term="+search+"&callback=?", JSON_handler);
You could put the logic you want to run in a callback.
var ajax = {
get : {
venues : function(search, fnCallback){
var results = "#";
$.getJSON("http://x.com/some.php?term="+search+"&callback=?",
function(data){
// success
results = data;
(typeof fnCallback == 'function') && fnCallback(data);
});
return results;
}
}
};
ajax.get.venues(term, function(result){
// Do stuff with data here :)
})
functional programming can be fun.
I stumbled on a piece of Ajax code that is not 100% safe since it mixes asynchronous and synchronous code... so basically, in the code below, I have a jQuery .each in which it grabs information from the elements and launches an Ajax GET request for each:
$(search).each(function() {
$.ajax({
url: 'save.x3?id=' + $(this).attr("id") + '&value=' + $(this).data("value"),
success: function(o){
//Update UI
},
error: function(o){
//Update UI
}
});
});
//code to do after saving...
So obviously the 'code to do after saving...' often gets executed before all the requests are completed. In an ideal world I would like to have the server-side code handle all of them at once and move the //code to do after saving into the success callback, but assuming this is not possible, I changed the code to something like the following to make sure all requests came back before continuing, which I'm still not in love with:
var recs = [];
$(search).each(function() {
recs[recs.length] = 'save.x3?id=' + $(this).attr("id") + '&value=' + $(this).data("value");
});
var counter = 0;
function saveRecords(){
$.ajax({
url: recs[counter],
success: function(o){
//Update progress
if (counter<recs.length-1){
counter++;
saveRecords();
}else{
doneSavingRecords();
}
},
error: function(o){
//Update progress
doneSavingRecords(o.status);
}
});
}
function doneSavingRecords(text){
//code to do after saving...
}
if (recs.length>0){
saveRecords(); //will recursively callback itself until a failed request or until all records were saved
}else{
doneSavingRecords();
}
So I'm looking for the 'best' way to add a bit of synchronous functionality to a series of asynchronous calls ?
Thanks!!
Better Answer:
function saveRecords(callback, errorCallback){
$('<div></div>').ajaxStop(function(){
$(this).remove(); // Keep future AJAX events from affecting this
callback();
}).ajaxError(function(e, xhr, options, err){
errorCallback(e, xhr, options, err);
});
$(search).each(function() {
$.get('save.x3', { id: $(this).attr("id"), value: $(this).data("value") });
});
}
Which would be used like this:
saveRecords(function(){
// Complete will fire after all requests have completed with a success or error
}, function(e, xhr, options, err){
// Error will fire for every error
});
Original Answer: This is good if they need to be in a certain order or you have other regular AJAX events on the page that would affect the use of ajaxStop, but this will be slower:
function saveRecords(callback){
var recs = $(search).map(function(i, obj) {
return { id: $(obj).attr("id"), value: $(obj).data("value") };
}).get(); // .get() turns the jQuery object into a plain array so shift() works below
var save = function(){
if(!recs.length) return callback();
$.ajax({
url: 'save.x3',
data: recs.shift(), // shift removes/returns the first item in an array
success: function(o){
save();
},
error: function(o){
//Update progress
callback(o.status);
}
});
}
save();
}
Then you can call it like this:
saveRecords(function(error){
// This function will run on error or after all
// commands have run
});
If I understand what you're asking, I think you could use $.ajaxStop() for this purpose.
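For example (a sketch; note that in newer jQuery versions the global AJAX events such as ajaxStop only fire on document):
// Runs once every outstanding jQuery AJAX request has finished.
$(document).ajaxStop(function () {
    //code to do after saving...
});
$(search).each(function () {
    $.get('save.x3', { id: $(this).attr("id"), value: $(this).data("value") });
});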
This is easily solved by calling the same function to check that all AJAX calls are complete. You just need a simple queue shared between functions, and a quick check (no loops, timers, promises, etc).
//a list of URLs for which we'll make async requests
var queue = ['/something.json', '/another.json'];
//will contain our return data so we can work with it
//in our final unified callback ('displayAll' in this example)
var data = [];
//work through the queue, dispatch requests, check if complete
function processQueue( queue ){
for(let i = 0; i < queue.length; i++){
const url = queue[i]; //capture this request's URL for the callbacks below
$.getJSON( url, function( returnData ) {
data.push(returnData);
//reduce the length of queue by 1
//don't care what URL is discarded, only that length is -1
queue.pop();
checkIfLast(function(){ displayAll(data); });
}).fail(function() {
throw new Error("Unable to fetch resource: " + url);
});
}
}
//see if this is last successful AJAX (when queue == 0 it is last)
//if this is the last success, run the callback
//otherwise don't do anything
function checkIfLast(callback){
if(queue.length == 0){
callback();
}
}
//when all the things are done
function displayAll(things){
console.log(things); //show all data after final ajax request has completed.
}
//begin
processQueue(queue);
Edit: I should add that I specifically aimed for an arbitrary number of items in the queue. You can simply add another URL and this will work just the same.
>> In the ideal world I would like to have the server-side code handle all of them at once and move //code to do after saving in the success callback
You'll need to think about this in terms of events.
Closure's net.BulkLoader (or a similar approach) will do it for you:
http://closure-library.googlecode.com/svn/trunk/closure/goog/docs/class_goog_net_BulkLoader.html
http://closure-library.googlecode.com/svn/trunk/closure/goog/docs/closure_goog_net_bulkloader.js.source.html
See:
goog.net.BulkLoader.prototype.handleSuccess_ (for individual calls)
&
goog.net.BulkLoader.prototype.finishLoad_ (for completion of all calls)