I have looked at other questions and answers regarding this, but can't seem to wrap my head around it...
I have a JavaScript function:
function getStates(theDiv){
    var stateGroupData;
    var stateData;
    var theGHtml = "";
    var theHtml = "<h4>MyPage</h4>";
    theHtml = theHtml+"<h5>select a state...</h5>";
    $.getJSON("getStateGroups.php", function(data) {
        stateGroupData = data;
        theHtml = theHtml+"<ul>";
        $.each(stateGroupData, function(i,jsonData) {
            theHtml = theHtml+"<li><a href='#"+jsonData.groupName+"'>"+jsonData.groupID+"</a></li><br/>";
            var theSQL = "getStates.php?gid="+jsonData.groupName;
            theGHtml = theGHtml+"<div id='"+jsonData.groupName+"'>";
            $.getJSON(theSQL, function(data2) {
                stateData = data2;
                $.each(stateData, function(i,jsonData2) {
                    alert(jsonData2.stateName);
                    theGHtml = theGHtml+"<span sname='"+jsonData2.stateName+"' lat='"+jsonData2.centerLat+"' lon='"+jsonData2.centerLon+"' zom='"+jsonData2.zoom+"'>"+jsonData2.stateName+"</span> ";
                });
            });
            theGHtml = theGHtml+"</div>";
        });
        theHtml = theHtml+"</ul>";
    });
    theDiv.html = theHtml+theGHtml;
}
The second (i.e. nested) getJSON does not return anything... Both PHP files just use PDO to request data from the SAME table. I can run the SQL in each file without any issues, so the SQL seems OK.
Is this a sync vs. async issue with the calls to getJSON?
Is this a sync vs. async issue with the calls to getJSON?
Probably. I think this is your problem:
stateData = data2;
Try changing that to:
var stateData = data2;
The first one assigns to the stateData variable declared in the enclosing function, so every callback shares (and overwrites) the same variable. The second one creates a variable that is local to that callback.
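A minimal sketch of that difference, using made-up names rather than the code above:

var shared;                      // declared once in the enclosing scope

[1, 2, 3].forEach(function (n) {
    shared = n;                  // assigns to the one shared variable
    var local = n;               // a fresh variable on every callback invocation
});

console.log(shared);             // 3 - the last value written wins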
You might benefit from refactoring this whole process so that you only need to make one AJAX call. It looks like you are pulling individual states associated with a group. You'd get better performance on the server from a single script which can, when needed, return the states associated with each group but otherwise just returns the groups.
Remember, every AJAX call is another hit to your server.
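For illustration, here is a minimal sketch of that single-request approach. It assumes a hypothetical getStateGroupsWithStates.php endpoint that returns each group with its states nested inside it (the endpoint name and the states field are made up for this example), and it writes the markup only once the data has arrived:

function getStates(theDiv) {
    var theHtml = "<h4>MyPage</h4><h5>select a state...</h5>";
    // one request that returns groups with their states nested inside
    $.getJSON("getStateGroupsWithStates.php", function(groups) {
        theHtml += "<ul>";
        $.each(groups, function(i, group) {
            theHtml += "<li><a href='#" + group.groupName + "'>" + group.groupID + "</a></li>";
            theHtml += "<div id='" + group.groupName + "'>";
            $.each(group.states, function(j, state) {
                theHtml += "<span sname='" + state.stateName + "'>" + state.stateName + "</span> ";
            });
            theHtml += "</div>";
        });
        theHtml += "</ul>";
        // update the DOM only after the data has arrived (assuming theDiv is a jQuery object)
        theDiv.html(theHtml);
    });
}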
I'm currently working on a simple web scraping Node.js program. It is based on cheerio; I get items from a website and extract some information from them.
As far as I understand it, all the functions I call inside the .each() loop are synchronous, so they should execute from top to bottom. And because the .each() loop is also just a normal loop, which executes synchronously in JS, the function should return my finished array. But instead I am getting undefined, and when I log the array directly to the console inside the callback it works(?).
function getIntensiv(){
    var intensivregister = [];
    request.post({url: 'SOMEURL', form: {SOMEFORM}}, function(err,res,body){
        var $ = cheerio.load(body);
        $('#dataList').children('tbody').children('tr').each(function(i, elem){
            var name = $(elem).children('td').first().text().trim().split("\n")[0].trim();
            var zipcity = $(elem).children('td').first().children('small').last().text();
            var streetnr = $(elem).children('td').first().children('br').last().prev().text();
            intensivregister.push({'name': name, 'zipcity': zipcity, 'streetnr': streetnr});
        });
        console.log(intensivregister); // works and prints the finished array
        return intensivregister;       // returns undefined before function finished
    });
}
I would appreciate it if you could explain to me where my mistake is and help me fix it.
function getIntensiv(){
    const cheerio = require('cheerio')
    const request = require('request')
    var intensivregister = [];
    request.get({url: 'https://www.w3schools.com/html/html_tables.asp'}, function(err,res,body){
        var $ = cheerio.load(body);
        $('#customers').children('tbody').children('tr').each(function(i, elem){
            var name = $(elem).children('td').first().text().trim().split("\n")[0].trim();
            var zipcity = $(elem).children('td').first().children('small').last().text();
            var streetnr = $(elem).children('td').first().children('br').last().prev().text();
            intensivregister.push({'name': name, 'zipcity': zipcity, 'streetnr': streetnr});
        });
        console.log(intensivregister); // works and prints the finished array
        return null; // this return belongs to the request callback, not to getIntensiv()
    });
    return null; // <--- this is what getIntensiv() actually returns; if no return statement is written, undefined is returned
};

var returnVal = getIntensiv()
console.log(returnVal);
See the highlighted comments above.
OK, I figured out that my idea of JavaScript was not how you should use it. I worked around my problem by letting go of the idea of returning values from functions (which comes mainly from my experience with synchronous programming) and instead using callback parameters which I pass to my function and call at the end of my request.
function getIntensiv(callback){
    var intensivregister = [];
    request.post(..., function(err, res, body){
        // ...scrape the page and fill intensivregister as before...
        callback(intensivregister); // called at the end of the request handler
    });
}
What also works (and I think is a better solution) is working with promises, e.g. with request-promise, and calling the callback in the .finally() handler.
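For what it's worth, here is a minimal sketch of a promise-based variant using request-promise; it returns the array from .then() instead of using a callback. The URL, form and selector are the placeholders from the question, and request/request-promise are deprecated nowadays, so treat this as an illustration of the pattern rather than a recommendation:

const rp = require('request-promise');
const cheerio = require('cheerio');

function getIntensiv() {
    // return the promise; the caller gets the array once it resolves
    return rp({
        method: 'POST',
        uri: 'SOMEURL',                        // placeholder from the question
        form: { /* SOMEFORM */ },              // placeholder from the question
        transform: body => cheerio.load(body)  // hand a loaded cheerio instance to .then()
    }).then($ => {
        const intensivregister = [];
        $('#dataList').children('tbody').children('tr').each((i, elem) => {
            const name = $(elem).children('td').first().text().trim().split('\n')[0].trim();
            intensivregister.push({ name: name });
        });
        return intensivregister;               // becomes the resolved value of the promise
    });
}

// usage
getIntensiv()
    .then(list => console.log(list))
    .catch(err => console.error(err));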
I made a JS file with data in a const array as below:
const messages = [
    { date: '2020-1-1', content:'message1'},
]
In order to make my file cleaner, I decided to put the data in a JSON file, and I want to load the data in my JS in order to use it like before.
My JSON is like this:
[
    {
        "date": "2020-1-1",
        "content": "message1"
    }
]
In order to import my JSON I put this code:
let messages = [];
$.getJSON("messages.json", function(data) {
    messages = data;
    console.log(messages);
});
The result is that my array is logged in the console, but the variable doesn't work; I tried things with Object.keys but got no further. I also don't use a framework and didn't find a solution in other questions here. Any help will be appreciated. Thank you very much!
I don't use a framework
You are using a library, though: $ === jQuery.
In order to make my file cleaner, I decided to put the data in a JSON file
You can just define a constants.js file and load that before your other scripts.
For example,
constants.js
const messages = [
    { date: '2020-1-1', content:'message1'},
]
main.js
alert(messages);
index.html
<script src="constants.js"></script>
<script src="main.js"></script>
@Alvin Stefanus
Because x, z and messages were undefined and stopped the code from working, I also added
let messages, x, z = [];
and now it works perfectly with your solution.
I will also use it as you suggested for the other operations.
Thank you very much, it solved my problem and gave me a new technique.
EDIT:
I also tried to delete this part:
var start = x;
$.getJSON('messages.json', function(data) {
    messages = data;
}); // <-- this
var end = z;
And it also works! That means that the problem was not the async function, but that I didn't put the loop within the curly brackets of
$.getJSON('messages.json', function(data) {
    // The data is finished being filled here
});
OK, this is probably the issue. $.getJSON() is an async function; the code after it will not wait for what happens inside its curly brackets:
var start = x;
$.getJSON('messages.json', function(data) {
    messages = data;
}); // <-- this
var end = z;
The code will run var end = z; before $.getJSON() has finished getting the result, because it is an asynchronous function. In other words, when the code reaches var end = z;, $.getJSON() is still working to get the data and has not finished yet. That is why messages = data has not been run yet.
So here is what you want to do:
$.getJSON('messages.json', function(data) {
    messages = data;
    for (const item of messages) {
        if (item.date === todayDay) {
            console.log(item.content);
            var newPara = document.createElement("p");
            var textNode = document.createTextNode(item.content);
            newPara.appendChild(textNode);
            var nodeParent = document.getElementById("titre");
            var nodeChild = document.getElementById("child1");
            nodeParent.appendChild(newPara, nodeChild);
        }
    }
});
Do all your needed operations within the curly brackets of
$.getJSON('messages.json', function(data) {
    // The data is finished being filled here
});
This is a callback function. You can learn more about callback functions here.
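To illustrate the idea with a made-up sketch (not jQuery's actual implementation): a callback is just a function you pass in, and the asynchronous code calls it once the result is ready.

function loadJson(url, callback) {
    // stand-in for the real network request that $.getJSON performs
    setTimeout(function () {
        var data = [{ date: '2020-1-1', content: 'message1' }];
        callback(data);              // "call back" with the result once it exists
    }, 500);
}

loadJson('messages.json', function (data) {
    console.log(data);               // runs only after the data is available
});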
Some Info
Yes, the loop has to be inside the callback function so that it runs after the data has been retrieved. To give you a better understanding of async functions: you could also put your loop outside, inside a timeout function, but you would never want this, because you will not know how long the operation that retrieves the data will take.
For example:
$.getJSON('messages.json', function(data) {
    messages = data;
});

setTimeout(function() {
    for (const item of messages) {
        if (item.date === todayDay) {
            console.log(item.content);
            var newPara = document.createElement("p");
            var textNode = document.createTextNode(item.content);
            newPara.appendChild(textNode);
            var nodeParent = document.getElementById("titre");
            var nodeChild = document.getElementById("child1");
            nodeParent.appendChild(newPara, nodeChild);
        }
    }
}, 2000); // run after 2 seconds
I'm sure the code above will also work, as the process of getting the data should not take longer than 2 seconds.
Again, this is not the correct way to do it; it is just to give you a better understanding of async functions.
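As a side note (my addition, not part of the original answer), the same idea of doing the work only when the data has arrived can be written without jQuery, using the browser's fetch API and a promise chain:

fetch('messages.json')
    .then(function (response) { return response.json(); })
    .then(function (messages) {
        // the data is available here, just like inside the $.getJSON callback
        for (const item of messages) {
            console.log(item.date, item.content);
        }
    })
    .catch(function (err) { console.error('Could not load messages.json', err); });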
I'm learning FRP using Bacon.js, and would like to assemble data from a paginated API in a stream.
The module that uses the data has a consumption API like this:
// UI module, displays unicorns as they arrive
beautifulUnicorns.property.onValue(function(allUnicorns){
    console.log("Got "+ allUnicorns.length +" Unicorns");
    // ... some real display work
});
The module that assembles the data requests sequential pages from an API and pushes onto the stream every time it gets a new data set:
// beautifulUnicorns module
var curPage = 1
var stream = new Bacon.Bus()
var property = stream.toProperty()
property.onValue(function(){}) // You have to add an empty subscriber, otherwise future onValues will not receive the initial value. https://github.com/baconjs/bacon.js/wiki/FAQ#why-isnt-my-property-updated
var allUnicorns = [] // !!! stateful list of all unicorns ever received. Is this idiomatic for FRP?
var getNextPage = function(){
    /* get data for subsequent pages.
       Skipping for clarity */
}
var gotNextPage = function (resp) {
    Array.prototype.push.apply(allUnicorns, resp) // just adds the responses to the existing array reference
    stream.push(allUnicorns)
    curPage++
    if (curPage <= pageLimit) { getNextPage() }
}
How do I subscribe to the stream in a way that provides me a full list of all unicorns ever received? Is this flatMap or similar? I don't think I need a new stream out of it, but I don't know. I'm sorry, I'm new to the FRP way of thinking. To be clear, assembling the array works, it just feels like I'm not doing the idiomatic thing.
I'm not using jQuery or another ajax library for this, so that's why I'm not using Bacon.fromPromise
You may also wonder why my consuming module wants the whole set instead of just the incremental update. If it were just appending rows that could be OK, but in my case it's an infinite scroll and it should draw data only if both: 1. the data is available and 2. the area is on screen.
This can be done with the .scan() method. You will also need a stream that emits the items of one page at a time; you can create it with .repeat().
Here is some draft code (sorry, not tested):
var itemsPerPage = Bacon.repeat(function(index) {
    var pageNumber = index + 1;
    if (pageNumber < PAGE_LIMIT) {
        return Bacon.fromCallback(function(callback) {
            // your method that talks to the server
            getDataForAPage(pageNumber, callback);
        });
    } else {
        return false;
    }
});

var allItems = itemsPerPage.scan([], function(allItems, itemsFromAPage) {
    return allItems.concat(itemsFromAPage);
});

// Here you go
allItems.onValue(function(allUnicorns){
    console.log("Got "+ allUnicorns.length +" Unicorns");
    // ... some real display work
});
As you may have noticed, you also won't need the .onValue(function(){}) hack or the external curPage state.
Here is a solution using flatMap and fold. When dealing with the network you have to remember that the data can come back in a different order than the requests were sent in - that's why the combination of fold and map is used.
var pages = Bacon.fromArray([1,2,3,4,5])

var requests = pages.flatMap(function(page) {
    return doAjax(page)
        .map(function(value) {
            return {
                page: page,
                value: value
            }
        })
}).log("Data received")

var allData = requests.fold([], function(arr, data) {
    return arr.concat([data])
}).map(function(arr) {
    // I would normally write this as a one-liner
    var sorted = _.sortBy(arr, "page")
    var onlyValues = _.pluck(sorted, "value")
    var inOneArray = _.flatten(onlyValues)
    return inOneArray
})

allData.log("All data")

function doAjax(page) {
    // This would actually be Bacon.fromPromise($.ajax...)
    // Math.random to simulate the fact that requests can return out
    // of order
    return Bacon.later(Math.random() * 3000, [
        "Page"+page+"Item1",
        "Page"+page+"Item2"])
}
http://jsbin.com/damevu/4/edit
I am fetching JSON data using $.getJSON to get the ID of a character, and then I use $.getJSON again to fetch data from another page using the ID that I got previously.
However, in the console it says
https://prod.api.pvp.net/api/lol/eune/v1.2/stats/by-summoner/undefined/summary?api_key=API_KEY_HERE
It should show the ID where it says undefined - /by-summoner/undefined/summary
This is my current script:
var input = "netuetamundis";
var sID;

$(document).ready(function () {
    // get json from this page to get the ID of the input
    $.getJSON("https://prod.api.pvp.net/api/lol/eune/v1.1/summoner/by-name/" + input + "?api_key=API_KEY_HERE", function (name) {
        obj = name;
        sID = obj.id;
        console.log(sID);
    });

    $.getJSON("https://prod.api.pvp.net/api/lol/eune/v1.2/stats/by-summoner/" + sID + "/summary?api_key=API_KEY_HERE", function (stats) {
        console.log(stats);
    });
});
When I googled this, the advice was to declare the variable outside the function, which I did as you can see in the code; however, it still doesn't work.
Your variable declarations and their scope are correct. The problem you are facing is that the first AJAX request may take a little bit of time to finish. Therefore, the second URL will be built with the value of sID before it has been set. You have to remember that AJAX requests are normally asynchronous, i.e. code execution goes on while the data is being fetched in the background.
You have to nest the requests:
$.getJSON("https://prod.api.pvp.net/api/lol/eune/v1.1/summoner/by-name/"+input+"?api_key=API_KEY_HERE" , function(name){
    obj = name;
    // sID is only now available!
    sID = obj.id;
    console.log(sID);
});
Clean up your code!
Put the second request into a function and let it accept sID as a parameter, so you don't have to declare it globally anymore (global variables are almost always evil!).
Remove the sID and obj variables - name.id is sufficient unless you really need them outside the function.
$.getJSON("https://prod.api.pvp.net/api/lol/eune/v1.1/summoner/by-name/"+input+"?api_key=API_KEY_HERE" , function(name){
    // We don't need sID or obj here - name.id is sufficient
    console.log(name.id);
    doSecondRequest(name.id);
});

// TODO Choose a better name
function doSecondRequest(sID) {
    $.getJSON("https://prod.api.pvp.net/api/lol/eune/v1.2/stats/by-summoner/" + sID + "/summary?api_key=API_KEY_HERE", function(stats){
        console.log(stats);
    });
}
Happy New Year :)
$.getJSON is an asynchronous request, meaning the code will continue to run even though the request is not yet done. You should trigger the second request when the first one is done; that is one of the options you have already seen in ComFreek's answer.
Alternatively you could use jQuery's $.when/.then(), similar to this:
var input = "netuetamundis";
var sID;

$(document).ready(function () {
    $.when($.getJSON("https://prod.api.pvp.net/api/lol/eune/v1.1/summoner/by-name/" + input + "?api_key=API_KEY_HERE", function (name) {
        obj = name;
        sID = obj.id;
        console.log(sID);
    })).then(function () {
        $.getJSON("https://prod.api.pvp.net/api/lol/eune/v1.2/stats/by-summoner/" + sID + "/summary?api_key=API_KEY_HERE", function (stats) {
            console.log(stats);
        });
    });
});
This would be more open to future modification and removes the need for the first call to know about the second call.
The first call can simply complete and do its own thing without having to be aware of any other logic you may want to add, keeping the two pieces of logic decoupled.
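Another option (my sketch, not code from either answer) is to use the promise that $.getJSON itself returns and chain the calls, which avoids both the nesting and the shared sID variable. This assumes jQuery 1.8+ so that .then() pipes the returned request into the next handler:

var input = "netuetamundis";

$(document).ready(function () {
    $.getJSON("https://prod.api.pvp.net/api/lol/eune/v1.1/summoner/by-name/" + input + "?api_key=API_KEY_HERE")
        .then(function (name) {
            // start the second request only once the ID is known
            return $.getJSON("https://prod.api.pvp.net/api/lol/eune/v1.2/stats/by-summoner/" + name.id + "/summary?api_key=API_KEY_HERE");
        })
        .then(function (stats) {
            console.log(stats);
        })
        .fail(function (err) {
            console.error(err);
        });
});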
I'm writing a titanium app but I'm having an issue with the execution order of my javascript.
I have an event listener on a button. It's a reload button that clears a table, uses HTTPClient to GET a JSON array of 'appointments', saves each appointment, and refreshes a table list. The problem is that I execute the table delete first, which should clear the table, then I fetch the appointments; but when the app refreshes the data table it seems to do it too soon, before the new appointments have been saved, because I'm getting an empty list. Now if I comment out the db.deleteAll line, each time I click reload the list is refreshed with the new (and existing) appointment data.
I need to make sure everything is done in order and only when the previous task is finished. So appointments.download() has to be executed AFTER db.deleteAll() and the list refresh has to be executed AFTER var allAppointments = db.all();
I think the problem is that the appointments.download() function has to make an HTTP GET call and then save the results, and the other functions are not waiting until it has finished.
Here is the code:
btnReload.addEventListener('click', function(e){
    var affected = db.deleteAll();
    appointments.download();
    var allAppointments = db.all();
    Ti.API.info(allAppointments);
    appointmentList.setData(allAppointments);
});
Here are the functions that are being called:
db.deleteAll():
api.deleteAll = function(){
    conn.execute('DELETE FROM appointments');
    return conn.rowsAffected;
}
appointments.download():
var appointments = (function() {
    var api = {};
    api.download = function(){
        var xhr = Titanium.Network.createHTTPClient();
        xhr.onload = function()
        {
            var data = JSON.parse(this.responseText);
            var dl = (data.length);
            for(i=0; i<dl; i++)
            {
                //p = addRow(data,i); // returns the **arr array
                //Ti.API.info('Saving : '+data[i].first_name);
                var contact_name = data[i].first_name + ' ' + data[i].last_name;
                var start_date = data[i].start_date;
                var reference = data[i].reference;
                var comment = data[i].comment;
                var appointment_id = data[i].quote_id;
                var lastid = db.create(appointment_id, start_date, reference, contact_name, comment);
                //Ti.API.info(lastid);
            }
        };
        xhr.open('GET','http://********.co.uk/appointments/download/');
        xhr.send();
        return;
    }
    return api; // module pattern: expose the api object (closing lines added for completeness)
})();
Any help most appreciated!
Billy
Synchronous calls give you coordination (code won't execute until any computation it depends on finishes) for free. With asynchronous calls, you have to take care of coordination. This generally means passing the dependent code as a function to the asynchronous code. The passed code is known as a "continuation", which means "the rest of the calculation, from a given point forward". Passing continuations around is known as (unsurprisingly) "continuation passing style".
To rewrite code in CPS, identify the point(s) where you need to coordinate the code (the call to appointments.download), then wrap the rest of the code in a function.
btnReload.addEventListener('click', function(e){
    var affected = db.deleteAll();
    appointments.download();
    function () {
        var allAppointments = db.all();
        Ti.API.info(allAppointments);
        appointmentList.setData(allAppointments);
    }
});
In the general case, the return value becomes the argument to the continuation. Here, no return value for appointments.download is used, so the continuation takes no arguments.
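As an aside, a tiny made-up example of that rule, showing a direct-style function next to its CPS counterpart:

// direct style: the result comes back via return
function add(a, b) {
    return a + b;
}
console.log(add(2, 3)); // 5

// CPS: the result is passed to the continuation instead
function addCPS(a, b, _return) {
    _return(a + b);
}
addCPS(2, 3, function (sum) {
    console.log(sum); // 5
});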
Next, rewrite the asynchronous function to take the continuation and pass the continuation in the call.
btnReload.addEventListener('click', function(e){
    var affected = db.deleteAll();
    appointments.download(
        function () {
            var allAppointments = db.all();
            Ti.API.info(allAppointments);
            appointmentList.setData(allAppointments);
        });
});

...

api.download = function(_return){
    var xhr = Titanium.Network.createHTTPClient();
    xhr.onload = function() {
        var data = JSON.parse(this.responseText);
        var dl = (data.length);
        for (i=0; i<dl; i++) {
            //p = addRow(data,i); // returns the **arr array
            //Ti.API.info('Saving : '+data[i].first_name);
            var contact_name = data[i].first_name + ' ' + data[i].last_name;
            var start_date = data[i].start_date;
            var reference = data[i].reference;
            var comment = data[i].comment;
            var appointment_id = data[i].quote_id;
            var lastid = db.create(appointment_id, start_date, reference, contact_name, comment);
            //Ti.API.info(lastid);
        }
        _return();
    };
    xhr.open('GET','http://********.co.uk/appointments/download/');
    xhr.send();
    return;
}
The continuation is named _return because the return statement can be modeled as a continuation (the default continuation). Calling _return in the asynchronous version has the same effect as calling return in the synchronous version.
Currently you are making requests asynchronously, which means you make a request and return from the function immediately; you don't wait for an answer. You should make your calls synchronous. I don't know what your conn and xhr really are, but they might provide ways to make the execute() and send() methods synchronous. For example, if you set the third argument of JavaScript's own XMLHttpRequest open() method to false, then the send() method will not return until a response is received from the server; your connection classes might have the same option.
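For reference, a minimal sketch of that synchronous XMLHttpRequest pattern in plain browser JavaScript (the URL is a placeholder; note that synchronous XHR on the main thread is deprecated and blocks the UI, so this only illustrates the idea):

var xhr = new XMLHttpRequest();
// third argument false = synchronous: send() blocks until the response arrives
xhr.open('GET', 'https://example.com/appointments/download/', false);
xhr.send();
if (xhr.status === 200) {
    var data = JSON.parse(xhr.responseText);
    console.log(data.length + ' appointments downloaded');
}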
Move the call to delete the current appointments into the onload handler. That way you will delete the old and immediately add the new data.
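A minimal sketch of that suggestion, reusing the function names from the question but doing the delete and the list refresh inside onload (my arrangement, not code from the question):

api.download = function(){
    var xhr = Titanium.Network.createHTTPClient();
    xhr.onload = function() {
        var data = JSON.parse(this.responseText);
        // clear the old rows only once the new data has actually arrived
        db.deleteAll();
        for (var i = 0; i < data.length; i++) {
            db.create(data[i].quote_id, data[i].start_date, data[i].reference,
                      data[i].first_name + ' ' + data[i].last_name, data[i].comment);
        }
        // refresh the list from the freshly saved rows
        appointmentList.setData(db.all());
    };
    xhr.open('GET','http://********.co.uk/appointments/download/');
    xhr.send();
}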