I am developing an application in which many functions return server data. 40 of these functions need to be executed one by one for synchronization to take place, so I have taken a single function and placed all 40 functions in it. The problem now is that some of these functions skip performing their database operations.
One of the 40 functions looks like this:
function sync_down_client_info() {
    try {
        parent.parent.parent.stmt_select.text = "select query";
        parent.parent.parent.stmt_select.execute();
        parent.parent.parent.result = parent.parent.parent.stmt_select.getResult();
        if (parent.parent.parent.result.data != null) {
            for (var i = 0; i < parent.parent.parent.result.data.length; i++) {
                var admin_id = parent.parent.parent.admin_id;
                var sync_client_date = parent.parent.parent.result.data[i].last_sync_client;
                // alert(admin_id + "=======" + sync_client_date);
                GATEWAY = 'http://projects/one_acc/flashservices/gateway.php';
                conn = new parent.parent.parent.air.NetConnection();
                conn.connect(GATEWAY);
                response = new parent.parent.parent.air.Responder(clientresult, clientFault);
                conn.call('down_client_info', response, admin_id, sync_client_date);
                response = new parent.parent.parent.air.Responder(clientserverdatetimeResult, clientserverdatetimeFault);
                conn.call('check_datetime', response);
            }
        }
    } catch (e) {
        // error handling omitted in the original post
    }
}
and the clientresult callback is:
function clientresult(e)
{
    for (var i = 0; i < e.length; i++) {
        // alert(e.length);
        parent.parent.parent.stmt_select.text = "select query"; // query text elided
        parent.parent.parent.stmt_select.execute();
        parent.parent.parent.result = parent.parent.parent.stmt_select.getResult();
        if (parent.parent.parent.result.data != null) {
            parent.parent.parent.stmt_insert.text = "update client_info ..."; // query text elided
            parent.parent.parent.stmt_insert.execute();
        }
    }
}
Can anyone tell me how to make this syncing run reliably?
jQuery has constructs like AjaxQueue to avoid the race conditions you are seeing; I'd advise using one of those plugins.
See http://plugins.jquery.com/project/ajaxqueue
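If jQuery isn't in the picture (the code above uses AIR's NetConnection rather than jQuery AJAX), the same queueing idea can be hand-rolled. Below is a minimal sketch, assuming each of the 40 sync functions is rewritten to accept a done callback that it invokes from its responder once its database writes have finished; the task name other than sync_down_client_info is a placeholder:

// Hypothetical list of the 40 sync tasks; each must call `done` when it has
// finished writing its gateway response to the local database.
var syncTasks = [
    sync_down_client_info,   // rewritten to accept a `done` argument
    sync_down_other_info     // placeholder for the remaining functions
];

function runSyncQueue(tasks, onComplete) {
    var index = 0;
    function next() {
        if (index >= tasks.length) {
            if (onComplete) onComplete();
            return;
        }
        // Run one task; it calls next() only after its DB work is done,
        // so the tasks never overlap.
        tasks[index++](next);
    }
    next();
}

runSyncQueue(syncTasks, function () {
    // all sync functions have run, one after another
});

Inside sync_down_client_info, done would be invoked from the clientresult handler once the last insert/update has executed.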
OK, maybe this is not the best title, but I lacked inspiration, so here goes:
Let's say you have a "global" (not really) variable to store temporary data and sub-data as random users interact with your server. On a user's first interaction with your server, the entry for that user will be undefined, so you need to handle that case.
What puzzles me is the best practice, performance-wise, for handling this when there are a lot of users and far more interactions with the variable.
Puzzled? Yeah, I know, words are not my strong point, so let me show you in code.
So you have
var user_data = [];
Then a function that handles user interaction to store data
function writeData(uid, data_name, data)
Now, on the first interaction, user_data[uid][data_name] is undefined, and so is user_data[uid].
I know you can handle this in two ways:
With if -
if(!user_data[uid]) user_data[uid] = {}
user_data[uid][data_name] = data
With try/catch
try{user_data[uid][data_name] = data}
catch(e) {user_data[uid] = {}; writeData(uid, data_name, data)}
The if will check on every interaction, and like I said there are a lot of them.
The try/catch will only hit the catch once per uid, but a try block has a cost of its own (AFAIK).
Which one is better? Or is there another, better way?
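For reference, there is a third common idiom that creates the slot and assigns to it in a single expression using short-circuit evaluation; it is essentially what the answers below benchmark (a minimal sketch reusing the writeData signature above):

var user_data = [];

// Create the per-user object on first use, then assign, all in one expression.
function writeData(uid, data_name, data) {
    (user_data[uid] || (user_data[uid] = {}))[data_name] = data;
}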
@Nertan,
There is a bias in your benchmark :P. I have slightly tweaked the logical-operator version (so its order of execution matches the if version). With this you can draw a conclusion.
//var present = require('present');
function test(val,ud,fun) {
var k = 10000000;
var t = Date.now();
for(var i=0; i<k;i++)
{
var uid = Math.ceil(Math.random()*1000);
fun(uid,ud,"value");
}
var tf = Date.now()-t;
return tf;
}
function setValue_Opp(uid,ud,value)
{
(!ud[uid] && (ud[uid] = {})) && (ud[uid].value = value);
}
function setValue_Try(uid,ud,value)
{
try{ ud[uid].value = value}
catch(e){ ud[uid] = {}; setValue_Try(uid,ud,value)};
}
function setValue_Cond(uid,ud,value)
{
if(!ud[uid]) ud[uid] = {}
ud[uid].value = value;
}
var k1=0;
var k2=0;
var k3=0;
for(var i=0;i<10;i++){
k1+=test(1,{}, setValue_Cond);
k2+=test(2,{}, setValue_Try);
k3+=test(3,{}, setValue_Opp);
}
console.log(k1,k2,k3)
I feel we can take advantage of short-circuit logical operators (plus ES6 arrow functions) as below:
let user_data = {}
const writeData = (uid, data_name, data) => {
((user_data[uid] || (user_data[uid] = {})) && (user_data[uid][data_name] = data ))
console.log(user_data)
// perform write action
}
writeData('1',"test","test1");
writeData('2',"test","test2");
writeData('1',"test","test3");
OK, so I had to rewrite the test because it doesn't work well in the snippet environment.
So I made this for Node.js:
var present = require('present');
function test(val,ud,fun) {
var k = 10000000;
var t = present();
for(var i=0; i<k;i++)
{
var uid = Math.ceil(Math.random()*1000);
fun(uid,ud,"value");
}
var tf = present()-t;
console.log("END "+val+" at "+tf);
return tf;
}
function setValue_Opp(uid,ud,value)
{
(ud[uid] || (ud[uid] = {})) && (ud[uid].value = value);
}
function setValue_Try(uid,ud,value)
{
try{ ud[uid].value = value}
catch(e){ ud[uid] = {}; setValue_Try(uid,ud,value)};
}
function setValue_Cond(uid,ud,value)
{
if(!ud[uid]) ud[uid] = {}
ud[uid].value = value;
}
var k1=0;
var k2=0;
var k3=0;
for(var i=0;i<10;i++){
k1+=test(1,{}, setValue_Cond);
k2+=test(2,{}, setValue_Try);
k3+=test(3,{}, setValue_Opp);
}
console.log(k1,k2,k3)
And in the end:
3244.328997004777 3695.0267750024796 3437.6855720058084
Which means:
The best is the classical if
The second best is the logical-operator method
And the worst is the try-catch
So it seems the classics win
Edited:
With further tests, thanks to @CRayen, the best method is:
(!ud[uid] && (ud[uid] = {})) && (ud[uid].value = value);
I am making a call to an API. The API returns a list of results. When it does, the response is fed into an object which I then iterate over to display the results.
Here is the function which does that:
var getAvailability = () => {
if (chosenData.hotel == "") {
showError("Please select a location before booking.");
$timeout(() => LTBNavService.setTab('location'), 50);
return;
}
searchResponse = {};
console.log(searchResponse);
WebAPI.getHotelAvailability(genSearchObject()).then((data) => {
searchResponse = data;
$timeout(() => $('[data-tab-content] .search-btn').first().focus(), 50);
generateRoomTypeObject(searchResponse);
}, (data) => searchResponse.error = data.data.errors[0].error);
};
The Problem:
The old results are still displayed until the new set of results is available. This causes a flicker and a delay, which is a bad user experience.
The solution (which I need help with):
What is the best possible way of handling this problem? Ideally, I would like to reset/clear the search response, so that as the new results are delivered the old ones are cleared. Is this possible from within the getAvailability function?
What would be the best way to achieve this?
The Solution:
Thanks to @Daniel Beck for his suggestion to call the generateRoomTypeObject function and feed it an empty object; +1'd his comment.
This triggered an undefined error in my generateRoomTypeObject function where I was running a few length checks (which makes sense, because the object was empty, so there was nothing to run length checks on).
I handled it by catching the undefined case and setting searchResponse to an empty object.
var generateRoomTypeObject = (searchResponse) => {
    var ratePlans = searchResponse.ratePlans,
        errors = searchResponse.error,
        roomTypes = [],
        ignoreBiggerRooms = false,
        rawRoomsObjs = [];
    if (angular.isUndefined(errors)) {
        // Iterate over the rate plans
        if (ratePlans === undefined) {
            // generateRoomTypeObject -- handle undefined by creating a new object
            searchResponse = {};
        } else {
            for (var i = 0; i < ratePlans.length; i++) {
                var ratePlan = ratePlans[i],
                    rooms = ratePlan.rooms;
                // Iterate over the rooms and add rooms to the room object.
                // Also keep a list of room types.
                for (var j = 0; j < rooms.length; j++) {
                    // Stuff here
                }
            }
        }
    }
};
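For completeness, the caller side of that suggestion would look roughly like this: a sketch based on the getAvailability function above (not the exact production code), clearing the stale results as soon as a new search starts, on the assumption that generateRoomTypeObject is what feeds the room list the view binds to:

var getAvailability = () => {
    if (chosenData.hotel == "") {
        showError("Please select a location before booking.");
        $timeout(() => LTBNavService.setTab('location'), 50);
        return;
    }
    // Clear the previous results right away so the view never shows stale rooms.
    searchResponse = {};
    generateRoomTypeObject(searchResponse);

    WebAPI.getHotelAvailability(genSearchObject()).then((data) => {
        searchResponse = data;
        $timeout(() => $('[data-tab-content] .search-btn').first().focus(), 50);
        generateRoomTypeObject(searchResponse);
    }, (data) => searchResponse.error = data.data.errors[0].error);
};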
I have a web application where I load (via AJAX) a dictionary file (1 MB) into a JavaScript array. I found the reason why Mobile Safari crashes after 10 seconds, but now what I'm wondering is how do I get around this issue?
In the link above, the answer suggests using setInterval, but this would mean I would have to chunk the dictionary file into pieces and load them one by one. This could surely be done, but I would have to make a lot of chunks to allow for connection speed, and too many requests would make the page take forever to load (and if I make the chunks too big, some mobile users might not be able to download a chunk within the 10-second window).
So, my question is: has anyone encountered this kind of problem and how did you go about it? A general push in the right direction is appreciated.
Edit:
This is the JS code I use to load the dictionary:
var dict = new Trie();
$.ajax({
url: 'data/dictionary_342k_uppercase.txt',
async: true,
success: function (data) {
var words = data.split('\n');
for (var i = words.length - 1; i >= 0; i--) {
dict.insert(words[i]);
}
},
error: function(){
$('#loading-message').text("Problem s rječnikom"); // Croatian: "Problem with the dictionary"
}
});
Trie.js:
function Trie () {
var ALPHABET_SIZE = 30;
var ASCII_OFFSET = 'A'.charCodeAt();
this.children = null;
this.isEndOfWord = false;
this.contains = function (str) {
var curNode = this;
for (var i = 0; i < str.length; i++) {
var idx = str.charCodeAt(i) - ASCII_OFFSET;
if (curNode.children && curNode.children[idx]) {
curNode = curNode.children[idx];
} else {
return false;
}
}
return curNode.isEndOfWord;
}
this.has = function (ch) {
if (this.children) {
return this.children[ch.charCodeAt() - ASCII_OFFSET] != undefined;
}
return false;
}
this.next = function (ch) {
if (this.children) {
return this.children[ch.charCodeAt() - ASCII_OFFSET];
}
return undefined;
}
this.insert = function (str) {
var curNode = this;
for (var i = 0; i < str.length; i++) {
var idx = str.charCodeAt(i) - ASCII_OFFSET;
if (curNode.children == null) {
curNode.children = new Array(ALPHABET_SIZE);
curNode = curNode.children[idx] = new Trie();
} else if (curNode.children[idx]) {
curNode = curNode.children[idx];
} else {
curNode = curNode.children[idx] = new Trie();
}
}
curNode.isEndOfWord = true;
return curNode;
}
}
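For reference, a quick usage sketch of the Trie above (uppercase ASCII words, matching the 'A' offset used in the implementation):

var t = new Trie();
t.insert("CAT");
t.insert("CATS");

console.log(t.contains("CAT"));    // true
console.log(t.contains("CA"));     // false: "CA" is only a prefix
console.log(t.has("C"));           // true: the root has a child for 'C'
console.log(t.next("C").has("A")); // true: walk down one level, then check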
This is a very common issue once you start doing processing in JS. If the Mobile Safari issue is the cause then what you want to do is figure out where the CPU time is going here.
I'm assuming it's the dict.insert() loop and not the data.split() call (that would be a bit more difficult to manage).
The idea here is to split up the dict.insert() loop into functional blocks that can be called asynchronously in a sequenced loop (setupBuildActions builds the blocks, and the Sequencer runs them in order). After the first block, each subsequent block is called via setTimeout, which effectively resets the function-time counter in the JS runtime (which seems to be what's killing your process).
Using the Sequencer function means you also keep control of the order in which the functions are run (they always run in the sequence they are generated in here and no two or more functions are scheduled for execution at the same time). This is much more effective than firing off thousands of setTimeout calls without callbacks. Your code retains control over the order of execution (which also means you can make changes during execution) and the JS runtime isn't overloaded with scheduled execution requests.
You might also want to check the node project at https://github.com/michiel/sequencer-js for more sequencing examples and http://ejohn.org/blog/how-javascript-timers-work/ for an explanation on setTimeout on different platforms.
var dict = new Trie();
// These vars are accessible from all the other functions we're setting up and
// running here
var BLOCKSIZE = 500;
var words = [];
var buildActions = [];
function Sequencer(funcs) {
(function() {
if (funcs.length !== 0) {
funcs.shift()(arguments.callee);
}
})();
}
// Build an Array with functions that can be called async (using setTimeout)
function setupBuildActions() {
for (var offset=0; offset<words.length; offset+= BLOCKSIZE) {
buildActions.push((function(offset) {
return function(callback) {
for (var i=offset; i < offset + BLOCKSIZE ; i++) {
if (words[i]) { // skip empty/undefined slots past the end of the data
dict.insert(words[i]);
}
}
// This releases control before running the next dict.insert loop
setTimeout(callback, 0);
};
})(offset));
}
}
$.ajax({
url: 'data/dictionary_342k_uppercase.txt',
async: true,
success: function (data) {
// You might want to split and setup these calls
// in a setTimeout if the problem persists and you need to narrow it down
words = data.split('\n');
setupBuildActions();
new Sequencer(buildActions);
},
error: function(){
$('#loading-message').text("Problem s rječnikom"); // Croatian: "Problem with the dictionary"
}
});
Here's an example using setTimeout to defer the actual insertion of words into your trie. It breaks up the original string into batches, and uses setTimeout to defer processing of inserting each batch of words. The batch size in my example is 5 words.
The actual batch insertion happens as subsequent event handlers in the browser.
It's possible that just breaking the words up into batches might take too long. If you hit this problem, remember you can chain setTimeout() calls: iterate for a while, then use setTimeout to schedule another event to iterate over some more, then setTimeout again, and so on (see the time-sliced sketch after the example below).
function addBatch(batch)
{
console.log("Processing batch:");
for (var i = 0; i < batch.length; i++)
console.log(batch[i]);
console.log("Return from processing batch");
}
var str = "alpha\nbravo\ncharlie\ndelta\necho\nfoxtrot\n" +
"golf\nhotel\nindia\njuliet\nkilo\nlima\n" +
"mike\nnovember\noscar\npapa\nquebec\n" +
"romeo\nsierra\ntango\nuniform\n" +
"victor\nwhiskey\nxray\nyankee\nzulu";
var batch = []
var wordend;
for (var wordstart = 0; wordstart < str.length; wordstart = wordend+1)
{
wordend = str.indexOf("\n", wordstart);
if (wordend < 0)
wordend = str.length;
var word = str.substring(wordstart, wordend);
batch.push(word);
if (batch.length >= 5)
{
setTimeout(addBatch, 0, batch);
batch = [ ];
}
}
setTimeout(addBatch, 0, batch);
batch = [ ];
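As mentioned above, the chunking can also be done by time rather than by word count: work for a fixed budget, then yield with setTimeout and continue where you left off. A rough sketch (the insertInSlices name and the 50 ms budget are mine, and dict is assumed to be the Trie from the question):

// Insert words from the raw dictionary string in time slices: work for ~50 ms,
// then yield to the browser and schedule the next slice.
function insertInSlices(str, dict, onDone) {
    var pos = 0;
    function slice() {
        var start = Date.now();
        while (pos < str.length && Date.now() - start < 50) {
            var end = str.indexOf("\n", pos);
            if (end < 0) end = str.length;
            var word = str.substring(pos, end);
            if (word.length > 0) dict.insert(word);
            pos = end + 1;
        }
        if (pos < str.length) {
            setTimeout(slice, 0);   // yield, then pick up where we left off
        } else if (onDone) {
            onDone();
        }
    }
    slice();
}

// Usage inside the ajax success handler:
// insertInSlices(data, dict, function () { /* dictionary ready */ });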
In my project I am calling the same service two or three (or more) times to find the distance and time for my route in JavaScript, using the CloudMade service.
I have one polyline; I get each point of the polyline and pass two consecutive points to the service to get a response from it.
What I am doing is:
function showPointsRoutes(e)
{
var a = e.target.getLatLngs();
for(var i = 1 ; i < a.length ; i++)
{
var as ="http://routes.cloudmade.com/BC9A493B41014CAABB98F0471D759707/api/0.3/" + a[i-1].lat+","+a[i-1].lng+","+a[i].lat+","+a[i].lng + "/car/shortest.js?callback=getRouteResponse";
addScript(as);
}
}
function getRouteResponse(response)
{
    mytimeArray.push(response.route_summary.total_time);
    myDistancArray.push(response.route_summary.total_distance);
}
function addScript(url)
{
var script = document.createElement('script');
script.type="text/javascript";
script.src=url;
document.getElementsByTagName('head') [0].appendChild(script);
}
But sometimes the response for some requests comes late, and because of this I am not getting the proper times and distances between points.
Can anyone suggest a way to get the values into the arrays in the sequence in which the requests were made?
The reason they give you different callback names is so that you can differentiate requests by using a different callback for each one. Try something like this:
Note: @Rodrigo Assis's suggestion would be better in this case, since the API supports it. The code below demonstrates a way to handle this when the API forces you to use multiple requests.
function showPointsRoutes(e)
{
var a = e.target.getLatLngs();
for(var i = 1 ; i < a.length ; i++)
requestRoute(i, a[i-1], a[i]);
}
function requestRoute(i, a, b)
{
window["getRouteResponse" + i] = function(response)
{
mytimeArray[i] = response.route_summary.total_time;
myDistancArray[i] = response.route_summary.total_distance;
};
var as ="http://routes.cloudmade.com/BC9A493B41014CAABB98F0471D759707/api/0.3/" + a.lat+","+a.lng+","+b.lat+","+b.lng + "/car/shortest.js?callback=getRouteResponse" + i;
addScript(as);
}
function addScript(url)
{
var script = document.createElement('script');
script.type="text/javascript";
script.src=url;
document.getElementsByTagName('head') [0].appendChild(script);
}
Using JavaScript I get a list of Facebook friends, though it only returns the name and id, and I need the picture of each user as well. I loop through the response and call the API again to get each picture, but because the call is asynchronous I can't associate the returned picture with the index of the friend in the array. (This is a problem I've had with asynchronous programming in general; is there a standard pattern for it?)
Example:
FB.api('me/friends', function(response) {
    if (response.error == null) {
        var friendsSale = response.data;
        var len = friendsSale.length;
        for (var x = 0; x < len; x++) {
            FB.api(friendsSale[x].id + '/picture', function(response) {
                // x is no longer the same x as in the initial call, and I can't pass the
                // original array element into the FB.api callback to get it back as part
                // of the response... or can I?
                friendsSale[x].pictureUrl = response;
            });
        }
    }
    // Then how do I know when I have all the pictures set, so I can set the data model
    // with the complete friend array?
    m.friends(friendsSale);
});
Yes, there is a pattern for this: a Closure
...
var len = friendsSale.length;
for (var i = 0; i < len; i++) {
(function() {
var j = i;
FB.api(friendsSale[i].id+'/picture', function(response) {
friendsSale[j].pictureUrl = response;
});
})();
}
To know when all calls have returned, you can simply keep a counter of returned calls, e.g.
...
var len = friendsSale.length;
var returnedCallsCounter = 0;
for (var i = 0; i < len; i++) {
(function() {
var j = i;
FB.api(friendsSale[i].id+'/picture', function(response) {
friendsSale[j].pictureUrl = response;
// Track number of returned calls
returnedCallsCounter++;
// Check if all calls have returned
if (returnedCallsCounter == len) {
m.friends(friendsSale);
}
});
})();
}
A simple solution for you:
All you have to do is query this:
https://graph.facebook.com/user_id/picture
and you will get the user's profile picture. For example:
Querying https://graph.facebook.com/4/picture (with no access token, BTW; try it in Chrome in incognito mode):
<img src="https://graph.facebook.com/4/picture">
will yield this smiling face:
Now you know Mark's fbid :P
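Applied to the original loop, this means the per-friend FB.api calls (and the ordering problem) go away entirely, because the picture URL can be built directly from each friend's id; a minimal sketch based on the question's code:

FB.api('me/friends', function(response) {
    if (response.error == null) {
        var friendsSale = response.data;
        for (var x = 0; x < friendsSale.length; x++) {
            // The Graph API serves the profile picture at this URL; no second call needed.
            friendsSale[x].pictureUrl = "https://graph.facebook.com/" +
                                        friendsSale[x].id + "/picture";
        }
        m.friends(friendsSale); // every pictureUrl is already set
    }
});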