Async calls to a service in JavaScript

In my project I call the same CloudMade service two or three times to find the distance and time for my route in JavaScript.
I have one polyline; I take each point of the polyline and pass two consecutive points to the service to get a response.
What I am doing is:
function showPointsRoutes(e)
{
    var a = e.target.getLatLngs();
    for(var i = 1 ; i < a.length ; i++)
    {
        var as = "http://routes.cloudmade.com/BC9A493B41014CAABB98F0471D759707/api/0.3/" + a[i-1].lat + "," + a[i-1].lng + "," + a[i].lat + "," + a[i].lng + "/car/shortest.js?callback=getRouteResponse";
        addScript(as);
    }
}
function getRouteResponse(response)
{
    mytimeArray.push(response.route_summary.total_time);
    myDistancArray.push(response.route_summary.total_distance);
}
function addScript(url)
{
    var script = document.createElement('script');
    script.type = "text/javascript";
    script.src = url;
    document.getElementsByTagName('head')[0].appendChild(script);
}
But sometimes the response to some requests arrives late,
so I do not get the proper times and distances between points.
Can anyone suggest a way to get the values into the arrays in the same sequence in which the requests were made?

The reason the service lets you choose the callback name is so that you can differentiate requests by using a different callback for each one. Try something like this:
Note: @Rodrigo Assis's suggestion would be better in this case, since the API supports it. The code below demonstrates a way to handle this when an API forces you to make multiple requests.
function showPointsRoutes(e)
{
    var a = e.target.getLatLngs();
    for(var i = 1 ; i < a.length ; i++)
        requestRoute(i, a[i-1], a[i]);
}
function requestRoute(i, a, b)
{
    window["getRouteResponse" + i] = function(response)
    {
        mytimeArray[i] = response.route_summary.total_time;
        myDistancArray[i] = response.route_summary.total_distance;
    };
    var as = "http://routes.cloudmade.com/BC9A493B41014CAABB98F0471D759707/api/0.3/" + a.lat + "," + a.lng + "," + b.lat + "," + b.lng + "/car/shortest.js?callback=getRouteResponse" + i;
    addScript(as);
}
function addScript(url)
{
    var script = document.createElement('script');
    script.type = "text/javascript";
    script.src = url;
    document.getElementsByTagName('head')[0].appendChild(script);
}

JavaScript Anonymous function in function behaves strangely [duplicate]

This question has been closed as a duplicate of "How do I return the response from an asynchronous call?".
I am doing a simple app in JavaScript. I have a "main_script" where I invoke everything. There is a global variable "feeds", which is an array, like this:
var feeds = [];
Then I use a function that loads JSON files from multiple URLs (also an array):
feeds = LoadJsonFeeds(urls); // Load feeds
console.log("main_code feeds.length: " + feeds.length);
I mention that console log later. OK, and here is my LoadJsonFeeds (in a different .js file, just a function):
function LoadJsonFeeds(urls) {
    var feeds_tmp = [];
    // There can be more URLs - for example 50 feeds from urls[0] and 20 from urls[1]
    for(var u = 0; u < urls.length; u++) {
        $.getJSON(urls[u], function(data) {
            var allFeeds = data.Result.Items; // allFeeds without checking if they are ok
            for(var i = 0; i < allFeeds.length; i++) {
                // Is the feed ok?
                if (allFeeds[i].Text != null)
                {
                    // Some more checking, but let's say it is ok for this example
                    feeds_tmp.push(allFeeds[i]);
                }
                // This I mention later
                console.log("LoadJson feeds.length: " + feeds.length);
            }
        });
    }
    console.log("LoadJson return"); // Mentioned later
    return feeds_tmp;
}
And here is the problem I am struggling with. When I look at the console, here is what I see:
LoadJson return
main_code feeds.length: 0
LoadJson feeds.length: 1
LoadJson feeds.length: 2
LoadJson feeds.length: 3
etc...
I just don't see the logic behind it! How can the function first return with nothing, then the main script continue, and only after that the global variable "feeds" get altered one by one? I suspect the anonymous function, but I don't know what to do with it.
What am I trying to achieve? Simple: I want a function that loads JSON files from URLs. For example, url[0] has 50 feeds and url[1] has 20. If everything is ok, it should return an array of 70 feeds. I use this for the first time in main_script, and then in an interval for updates, which I call every few seconds. In this function I check which feed is new and put it somewhere else:
function UpdateFeeds(url) {
    console.log("updating...");
    var feeds_tmp = LoadJsonFeeds(url);
    console.log("Update feeds_tmp.length: " + feeds_tmp.length); // This is 0
    for(var f_tmp = 0; f_tmp < feeds_tmp.length; f_tmp++) { // This does not happen because feeds_tmp.length = 0
        for(var f = 0; f < feeds.length; f++) {
            // Check which feed is new and put it somewhere else (the new one)
        }
    }
    feeds = feeds_tmp; // Make all new feeds the global variable
}
But since the returned array has length 0, that for loop never runs. The global variable "feeds" still gets altered eventually, so for the main function it does not matter; the data ends up in the global variable. But I really need to find the new entries and do some work with them, and since it does not work that way, I am pretty lost.
What am I missing, and how do I fix this? Thank you!
Your console.log("LoadJson feeds.length: " + feeds.length); is called later because $.getJSON is an asynchronous call. You can update the function to take a callback and use a simple counter so the callback only fires once every URL has loaded:
function LoadJsonFeeds(urls, callback) {
    var feeds_tmp = [];
    var completed = 0;
    // There can be more URLs - for example 50 feeds from urls[0] and 20 from urls[1]
    for(var u = 0; u < urls.length; u++) {
        $.getJSON(urls[u], function(data) {
            var allFeeds = data.Result.Items; // allFeeds without checking if they are ok
            for(var i = 0; i < allFeeds.length; i++) {
                // Is the feed ok?
                if (allFeeds[i].Text != null)
                {
                    // Some more checking, but let's say it is ok for this example
                    feeds_tmp.push(allFeeds[i]);
                }
                // This I mention later
                console.log("LoadJson feeds.length: " + feeds.length);
            }
            completed++;
            if (completed === urls.length) // to make sure all URLs have loaded
                callback(feeds_tmp);
        });
    }
}
And call your function as:
LoadJsonFeeds(urls, function(loadedFeeds) {
    feeds = loadedFeeds; // Load feeds
    console.log("main_code feeds.length: " + feeds.length);
});
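If you prefer to let jQuery track the outstanding requests instead of counting them yourself, here is a minimal sketch using $.when (assuming jQuery 1.5+ deferreds; the Result.Items shape is taken from the question):
function LoadJsonFeeds(urls, callback) {
    // One jqXHR promise per URL.
    var requests = urls.map(function (u) {
        return $.getJSON(u);
    });
    // $.when resolves once every request has resolved; each argument is
    // [data, statusText, jqXHR] for the corresponding request, in order.
    // Note: with exactly one URL, $.when passes (data, statusText, jqXHR) unwrapped.
    $.when.apply($, requests).done(function () {
        var feeds_tmp = [];
        $.each(arguments, function (_, result) {
            var allFeeds = result[0].Result.Items;
            for (var i = 0; i < allFeeds.length; i++) {
                if (allFeeds[i].Text != null) {
                    feeds_tmp.push(allFeeds[i]);
                }
            }
        });
        callback(feeds_tmp);
    });
}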

AngularJS - Is controller code executed line by line?

I'm new to AngularJS, and I am experimenting with AngularJS and the Twitch API.
I have a list of channels that I'm interested in, defined as var channels.
Then I loop through another array, twitchList.channels, which contains the API addresses I'm supposed to call, and issue an $http.get for each one.
(function() {
    var app = angular.module('twitchList', []);
    app.controller('twitchController', ['$http', function($http){
        var twitchList = this;
        twitchList.channels = [];
        var channels = ["freecodecamp", "storbeck", "terakilobyte", "habathcx", "RobotCaleb", "thomasballinger", "noobs2ninjas", "beohoff", "MedryBW"];
        for (var i = 0; i < channels.length; i++ ) {
            twitchList.channels.push({
                name: channels[i],
                api: 'https://api.twitch.tv/kraken/streams/' + channels[i],
            })
        }
        var data_list = [];
        for (var j = 0; j < twitchList.channels.length; j++) {
            $http.get(twitchList.channels[j].api).success(function(data){
                data_list.push(data);
            })
        }
        // Issue arises here!
        console.log(data_list);
        console.log(data_list.length);
    }]);
})();
The API calls seem to be working perfectly; however, I need to get the results of the API calls into an array called data_list. When I print data_list and data_list.length, data_list.length always returns 0, while data_list is sometimes populated (it is either a 0-size array or a 9-size array). Even though the logged array shows 9 elements, calling .length always gives 0.
This makes me think the controller code is not executed line by line. Or is there something wrong with my logic?
Can someone give me a pointer? Thanks.
No, this line:
data_list.push(data);
will be executed when you receive a response to the HTTP request sent on the line above. Hence the following lines:
console.log(data_list);
console.log(data_list.length);
will output [] and 0, because they run before any of the responses have arrived.
I've not used it before, but could you possibly use $q.all in order to resolve multiple promises? I've used the equivalent $.when function in jQuery to achieve this in the past.
// Note: $q must be injected into the controller alongside $http for this to work.
var data_list = [];
var promise_array = [];
var request;
for (var j = 0; j < twitchList.channels.length; j++) {
    request = $http.get(twitchList.channels[j].api);
    request.success(function(data) {
        data_list.push(data);
    });
    promise_array.push(request);
}
$q.all(promise_array).then( function() {
    console.log(data_list);
    console.log(data_list.length);
});
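As a minimal sketch of the same idea that skips the shared data_list entirely (again assuming $q is injected alongside $http), $q.all resolves with the responses in the same order as the promise array, so the results line up with the channel names:
var app = angular.module('twitchList', []);
app.controller('twitchController', ['$http', '$q', function ($http, $q) {
    var twitchList = this;
    var channels = ["freecodecamp", "storbeck", "terakilobyte"];
    // One promise per channel, in the same order as the channels array.
    var requests = channels.map(function (name) {
        return $http.get('https://api.twitch.tv/kraken/streams/' + name);
    });
    $q.all(requests).then(function (responses) {
        // responses[k] corresponds to channels[k]; the payload is in .data
        twitchList.data_list = responses.map(function (r) { return r.data; });
        console.log(twitchList.data_list.length); // logs the expected count
    });
}]);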

Can't add new name and value to JSON object dynamically with condition using JavaScript

I can't add a new name and value following this given condition:
$.each(names, function (i, name) {
    $.get('https://www.example.com/path/' + name, function (data) {
        var arrNow = CSVToArray(data, ',');
        allArr.push(arrNow);
        counter++;
        if (counter === names.length) {
            for (var j = 0; j < allArr.length; j++) {
                for (var k = 1; k < allArr[j].length; k++) {
                    //console.log(allArr[j][k][0] + ': ' + allArr[j][k][1]);
                    //var f = moment(allArr[j][k][0]).format('lll');
                    var f = allArr[j][k][0];
                    json.push({
                        "datetime": f
                    });
                    if (j == 0) {
                        if (json[k].datetime === allArr[0][k][0]) {
                            var newAtt = "water_actual";
                            var newValue = allArr[0][k][1];
                            json[k][newAtt] = newValue;
                        }
                    }
                    if (j == 1) {
                        if (json[k].datetime === allArr[1][k][0]) {
                            var newAtt = "rainfall_actual";
                            var newValue = allArr[1][k][1];
                            json[k][newAtt] = newValue;
                        }
                    }
                    if (j == 2) {
                        if (json[k].datetime == allArr[2][k][0]) {
                            var newAtt = "forecast_water";
                            var newValue = allArr[2][k][1];
                            json[k][newAtt] = newValue;
                        }
                    }
                }
            }
        }
    });
});
I was able to add a new name water_actual and its value using an if statement. If the datetime from the JSON object matches the array value (date and time), I'd like to add it with its specific name as stated above. But I can't seem to make it work.
Here's the fiddle.
If I may provide some general feedback: it's probably good practice to simplify your code to the minimum example that reproduces your problem. Not only can that drastically increase your chances of fixing it yourself, it also increases the odds that you'll get help here.
With that in mind, consider the basic structure of what you're trying here:
var someNames = ["foo", "bar"];
var allTheData = [{
    "aardvark": true
}];
$.each(someNames, function (i, name) {
    $.get('http://example.com/api/' + name, function (data) {
        data.aNewProperty = 'wombat';
        allTheData.push(data);
    });
});
console.log(allTheData);
Here, $.each fires off a request for everything in someNames, and execution then proceeds immediately to the console.log statement. For all we know, each individual API call ($.get) could take seconds, or minutes. By this time we've already tried to use the contents of allTheData, which may or may not have been modified.
To avoid this sort of thing in legacy JavaScript we can make use of the callback already provided by $.get:
$.get('http://example.com/api/' + name, function (data) {
    data.aNewProperty = 'wombat';
    console.log(data);
});
Inside the callback, we know for sure that the API request has already completed (although the above assumes that it succeeded, which is a whole other kettle of fish). This would output the result of each API request as the responses arrive, though not necessarily in the order you'd expect!
JavaScript's asynchronous nature tended to lead in the past to a whole lot of callbacks. With the advent of ES6 we have some more options available to us, especially promises.
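As a minimal sketch of that promise-based option (using the same hypothetical example.com endpoints as above), jQuery's $.get returns a thenable that Promise.all can wait on, so the processing only runs once every request has finished:
var someNames = ["foo", "bar"];
// Kick off all the requests at once and keep the promises.
var requests = someNames.map(function (name) {
    return $.get('http://example.com/api/' + name);
});
// Promise.all resolves once every request succeeds, with the responses
// in the same order as someNames.
Promise.all(requests).then(function (allTheData) {
    allTheData.forEach(function (data) {
        data.aNewProperty = 'wombat';
    });
    console.log(allTheData); // safe to use the collected data here
}).catch(function (err) {
    console.error('One of the requests failed', err);
});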

Load dictionary file with ajax and don't crash iPhone Mobile Safari

I have a web application where I load (via AJAX) a dictionary file (1 MB) into a JavaScript array. I found the reason why Mobile Safari crashes after 10 seconds, but now I'm wondering how to get around this issue.
In the link above, the answer suggests using setInterval, but this would mean I would have to chunk the dictionary file into pieces and load them one by one. This could surely be done, but I would have to make a lot of chunks to account for connection speed, and too many requests would take forever for the page to load (and if I make the chunks too big, some mobile users might not be able to download a chunk within the given 10-second window).
So my question is: has anyone encountered this kind of problem, and how did you go about it? A general push in the right direction is appreciated.
edit:
This is the JS code I use to load the dictionary:
var dict = new Trie();
$.ajax({
    url: 'data/dictionary_342k_uppercase.txt',
    async: true,
    success: function (data) {
        var words = data.split('\n');
        for (var i = words.length - 1; i >= 0; i--) {
            dict.insert(words[i]);
        }
    },
    error: function(){
        $('#loading-message').text("Problem s rječnikom");
    }
});
Trie.js:
function Trie () {
    var ALPHABET_SIZE = 30;
    var ASCII_OFFSET = 'A'.charCodeAt();
    this.children = null;
    this.isEndOfWord = false;
    this.contains = function (str) {
        var curNode = this;
        for (var i = 0; i < str.length; i++) {
            var idx = str.charCodeAt(i) - ASCII_OFFSET;
            if (curNode.children && curNode.children[idx]) {
                curNode = curNode.children[idx];
            } else {
                return false;
            }
        }
        return curNode.isEndOfWord;
    }
    this.has = function (ch) {
        if (this.children) {
            return this.children[ch.charCodeAt() - ASCII_OFFSET] != undefined;
        }
        return false;
    }
    this.next = function (ch) {
        if (this.children) {
            return this.children[ch.charCodeAt() - ASCII_OFFSET];
        }
        return undefined;
    }
    this.insert = function (str) {
        var curNode = this;
        for (var i = 0; i < str.length; i++) {
            var idx = str.charCodeAt(i) - ASCII_OFFSET;
            if (curNode.children == null) {
                curNode.children = new Array(ALPHABET_SIZE);
                curNode = curNode.children[idx] = new Trie();
            } else if (curNode.children[idx]) {
                curNode = curNode.children[idx];
            } else {
                curNode = curNode.children[idx] = new Trie();
            }
        }
        curNode.isEndOfWord = true;
        return curNode;
    }
}
This is a very common issue once you start doing processing in JS. If the Mobile Safari issue is the cause then what you want to do is figure out where the CPU time is going here.
I'm assuming it's the dict.insert() loop and not the data.split() call (that would be a bit more difficult to manage).
The idea here is to split up the dict.insert() loop into functional blocks that can be called asynchronously in a sequenced loop (which is what the setupBuildActions function does). After the first block each subsequent block is called via setTimeout, which effectively resets the function-time counter in the JS runtime (which seems to be what's killing your process).
Using the Sequencer function means you also keep control of the order in which the functions are run (they always run in the sequence they are generated in here and no two or more functions are scheduled for execution at the same time). This is much more effective than firing off thousands of setTimeout calls without callbacks. Your code retains control over the order of execution (which also means you can make changes during execution) and the JS runtime isn't overloaded with scheduled execution requests.
You might also want to check the node project at https://github.com/michiel/sequencer-js for more sequencing examples and http://ejohn.org/blog/how-javascript-timers-work/ for an explanation on setTimeout on different platforms.
var dict = new Trie();
// These vars are accessible from all the other functions we're setting up and
// running here
var BLOCKSIZE = 500;
var words = [];
var buildActions = [];
function Sequencer(funcs) {
    (function() {
        if (funcs.length !== 0) {
            funcs.shift()(arguments.callee);
        }
    })();
}
// Build an Array with functions that can be called async (using setTimeout)
function setupBuildActions() {
    for (var offset = 0; offset < words.length; offset += BLOCKSIZE) {
        buildActions.push((function(offset) {
            return function(callback) {
                for (var i = offset; i < offset + BLOCKSIZE; i++) {
                    if (words[i] != null) { // skips entries past the end of the array
                        dict.insert(words[i]);
                    }
                }
                // This releases control before running the next dict.insert loop
                setTimeout(callback, 0);
            };
        })(offset));
    }
}
$.ajax({
    url: 'data/dictionary_342k_uppercase.txt',
    async: true,
    success: function (data) {
        // You might want to split and set up these calls
        // in a setTimeout if the problem persists and you need to narrow it down
        words = data.split('\n');
        setupBuildActions();
        new Sequencer(buildActions);
    },
    error: function(){
        $('#loading-message').text("Problem s rječnikom");
    }
});
Here's an example using setTimeout to defer the actual insertion of words into your trie. It breaks up the original string into batches, and uses setTimeout to defer processing of inserting each batch of words. The batch size in my example is 5 words.
The actual batch insertion happens as subsequent event handlers in the browser.
It's possible that just breaking the words up into batches might take too long. If you hit this problem, remember you can chain setTimeout() calls, e.g. iterating for a while, then using setTimeout to schedule another event to iterate some more, then setTimeout again, and so on.
function addBatch(batch)
{
    console.log("Processing batch:");
    for (var i = 0; i < batch.length; i++)
        console.log(batch[i]);
    console.log("Return from processing batch");
}
var str = "alpha\nbravo\ncharlie\ndelta\necho\nfoxtrot\n" +
    "golf\nhotel\nindia\njuliet\nkilo\nlima\n" +
    "mike\nnovember\noscar\npapa\nquebec\n" +
    "romeo\nsierra\ntango\nuniform\n" +
    "victor\nwhiskey\nxray\nyankee\nzulu";
var batch = [];
var wordend;
for (var wordstart = 0; wordstart < str.length; wordstart = wordend + 1)
{
    wordend = str.indexOf("\n", wordstart);
    if (wordend < 0)
        wordend = str.length;
    var word = str.substring(wordstart, wordend);
    batch.push(word);
    if (batch.length >= 5)
    {
        setTimeout(addBatch, 0, batch);
        batch = [];
    }
}
setTimeout(addBatch, 0, batch);
batch = [];
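As a compact variant of the same deferral idea (this is an illustrative sketch, not code from either answer; insertInSlices is a hypothetical helper), a single self-scheduling function can insert the words in slices and yield back to the browser between slices so the long-running-script watchdog resets:
// dict and the word list would come from the $.ajax success handler shown above.
function insertInSlices(dict, words, sliceSize, onDone) {
    var i = 0;
    (function nextSlice() {
        var end = Math.min(i + sliceSize, words.length);
        for (; i < end; i++) {
            if (words[i]) {
                dict.insert(words[i]);
            }
        }
        if (i < words.length) {
            setTimeout(nextSlice, 0); // yield to the browser, then continue
        } else if (onDone) {
            onDone();
        }
    })();
}
// Hypothetical usage inside the success handler:
// insertInSlices(dict, data.split('\n'), 500, function () {
//     $('#loading-message').text('Dictionary loaded');
// });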

Synchronization problem with functions in JavaScript

I am developing an application in which I have many functions that return server data. I have 40 functions that need to be executed one by one for synchronization to take place, so I have placed all 40 functions inside a single function. Now the problem is that some of these functions skip performing database operations.
One of the 40 functions is:
function sync_down_client_info(){
    try {
        parent.parent.parent.stmt_select.text = "select query";
        parent.parent.parent.stmt_select.execute();
        parent.parent.parent.result = parent.parent.parent.stmt_select.getResult();
        if (parent.parent.parent.result.data != null) {
            for (var i = 0; i < parent.parent.parent.result.data.length; i++) {
                var admin_id = parent.parent.parent.admin_id;
                var sync_client_date = parent.parent.parent.result.data[i].last_sync_client;
                // alert(admin_id+"======="+ sync_client_date);
                GATEWAY = 'http://projects/one_acc/flashservices/gateway.php';
                conn = new parent.parent.parent.air.NetConnection();
                conn.connect(GATEWAY);
                response = new parent.parent.parent.air.Responder(clientresult, clientFault);
                conn.call('down_client_info', response, admin_id, sync_client_date);
                response = new parent.parent.parent.air.Responder(clientserverdatetimeResult, clientserverdatetimeFault);
                conn.call('check_datetime', response);
            }
        }
And the clientresult function is:
function clientresult(e)
{
    for (var i = 0; i < e.length; i++) {
        //alert(e.length);
        parent.parent.parent.stmt_select.text = "select query";
        parent.parent.parent.stmt_select.execute();
        parent.parent.parent.result = parent.parent.parent.stmt_select.getResult();
        if (parent.parent.parent.result.data != null) {
            parent.parent.parent.stmt_insert.text = "update client_info";
            parent.parent.parent.stmt_insert.execute();
        }
Can anyone tell me how to do this syncing?
jQuery has constructs like AjaxQueue to avoid the race conditions you are seeing. I'd advise using one of these plugins.
See http://plugins.jquery.com/project/ajaxqueue
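If pulling in a plugin is not an option, here is a minimal sketch of the same idea, assuming each of your 40 sync functions can be changed to accept a done callback that it invokes when its server round-trip finishes (the function names in the usage example are hypothetical):
// Run asynchronous tasks strictly one after another.
// Each task is a function that takes a `done` callback.
function runInSequence(tasks) {
    var index = 0;
    function next() {
        if (index >= tasks.length) return;
        var task = tasks[index++];
        task(next); // the task calls next() when its async work completes
    }
    next();
}

// Hypothetical usage: wrap each sync step so it signals completion.
runInSequence([
    function (done) { sync_down_client_info(done); },
    function (done) { sync_down_other_table(done); }
    // ...remaining sync functions
]);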
