Google Cloud SQL not updating with script - javascript

I have a long script which is designed to edit a specific row in the Cloud SQL table. The code is long, so I will shorten it.
Client Side:
function build_profile(){
  var cbid = sessionStorage.getItem("client_id");
  var self = this;
  var createSuccess = function(data){
    var statuse = ["Active", "Waiting", "Discharged"];
    if(data !== false){
      data = data.split(",");
      var dec = app.pages.Profile.descendants;
      dec.fname.text = data[1];
      dec.sname.text = data[3];
      sessionStorage.setItem("school_id", data[9]);
      app.popups.Loading.visible = false;
    }
  };
  var init = function() {google.script.run.withSuccessHandler(createSuccess).get_user_data(cbid);};
  app.popups.Loading.visible = true;
  init();
}

function save_profile() {
  var createSuccess = function(data){
    var dec = app.pages.Profile.descendants;
    console.log(data);
    if(data !== -1){
      var ds = app.datasources.Clients;
      ds.load(function(){
        ds.selectIndex(data);
        console.log("editing:" + ds.item.CBID);
        ds.item.fname = dec.fname_edit.value;
        ds.item.sname = dec.sname_edit.value;
        ds.load(function(){build_profile();});
      });
    }
  };
  var init = function() {google.script.run.withSuccessHandler(createSuccess).update_client(sessionStorage.getItem("client_id"));};
  init();
}
Server Side:
function get_user_data(cbid){
  try{
    var query = app.models.Clients.newQuery();
    query.filters.CBID._equals = parseInt(cbid);
    var results = query.run();
    if(results.length > 0){
      var arr = [
        results[0].Id,    //0
        results[0].fname, //1
        results[0].sname  //3
      ];
      return arr.join(",");
    }else{
      return false;
    }
  }catch(e){
    console.error(e);
    console.log("function get_user_data");
    return false;
  }
}

function update_client(cbid) {
  try{
    var ds = app.models.Clients;
    var query = ds.newQuery();
    query.filters.CBID._equals = parseInt(cbid);
    var results = query.run();
    if(results.length > 0){
      var id = results[0]._key;
      return id+1;
    }else{
      return -1;
    }
  }catch(e){
    console.error(e);
    return -1;
  }
}
This gets the Clients table and updates the row for the selected client, then rebuilds the profile with the new information.
EDIT: I have managed to get to the point where it tells me that I cannot run the query (ds.load()) while it is processing its results. There does not seem to be a way to manually check whether it has finished processing?
Note: datasource.saveChanges() does not work, as the datasource saves automatically.

Your error is being produced by the client-side function save_profile(), specifically in this block:
ds.load(function(){
  ds.selectIndex(data);
  console.log("editing:" + ds.item.CBID);
  ds.item.fname = dec.fname_edit.value;
  ds.item.sname = dec.sname_edit.value;
  ds.load(function(){build_profile();});
});
What you are doing is reloading the datasource almost immediately, before the first load has finished, hence you are getting the error
cannot run the query (ds.load()) while processing its results
This is just a matter of timing. A setTimeout can take care of the issue. Just do the following:
ds.load(function(){
  ds.selectIndex(data);
  console.log("editing:" + ds.item.CBID);
  ds.item.fname = dec.fname_edit.value;
  ds.item.sname = dec.sname_edit.value;
  setTimeout(function(){
    ds.load(function(){build_profile();});
  }, 1000);
});

I have managed to find a solution to this particular issue. It requires manual saving, but it saves a lot of hassle, since one of the built-in mechanisms can be used rather than relying on error handling or timeouts.
function client_query_and_result(){
  var createSuccess = function(data){ // callback function
    console.log(data);
  };
  app.datasources.SomeTable.saveChanges(function(){ // ensures all changes have been saved
    app.datasources.SomeTable.load(function(){ // makes sure to reload the datasource
      google.script.run.withSuccessHandler(createSuccess).server_query_and_result(); // at this point all data has been saved and reloaded
    });
  });
}
The server-side code uses the exact same methods. To enable manual saving, select the table in App Maker -> Datasources and check "Manual save mode".
Hope this can be useful to someone else.

Related

web based firebase code running multiple times when executed

I am trying to update some data in my Firebase Realtime Database, but for some reason the code runs multiple times when I present it with a new string, which messes up the output.
Below is what I have tried. I tried creating a new function for the ref.update(), but the same thing happens again; I have pointed out in the code comments exactly where the code jumps back to.
function fire_base_db() {
  var ref = firebase.database().ref();
  ref.on("value", function(snapshot) {
    r = snapshot.val();
    var ham_db = r.hams.spam_words;
    var spam_db = r.spams.spam_words; // contains spam data now
    console.log('function 1');
    inputstring(ham_db, spam_db);
  }, function(error) {
    console.log("Error: " + error.code);
  });
}
inputstring(ham_db, spam_db); // just a random function i need
spam_prop(user_string_toknized, spam_db, ham_db); // yet another

function spam_or_ham() {
  var final_value = " ";
  if (total_spam_probablity < total_ham_probablity) {
    console.log("ham");
    final_value = "ham";
  } else {
    console.log("spam");
    final_value = "spam";
  }
  if (final_value = "spam") {
    var ref = firebase.database().ref("hams/spam_words/");
    ref.update(old_words_spam_2);
  } else if (final_value = "ham") {
    var ref2 = firebase.database().ref("spams/spam_words/");
    ref2.update(old_words_ham_2);
  }
  for (var a in new_words_spam) {
    new_words_spam[b] = 1;
  }
  for (var b in new_words_ham) {
    new_words_ham[a] = 1;
  }
  if (final_value = "spam") {
    var ref9 = firebase.database().ref("spams/spam_words/");
    ref9.update(new_words_spam);
  } else if (final_value = "ham") {
    var ref2 = firebase.database().ref("hams/spam_words");
    ref2.update(new_words_ham);
  }
}

fire_base_db_upadt_new_words();
fire_base_db_upadt_new_words_2();
The first function, fire_base_db(), is used to read data from the database, and the next two functions are just steps toward the output. The last function, spam_or_ham(), is where the bug appears: the moment the code enters the if statement and reaches the ref9.update part, it jumps back to ref.on in the first function and runs multiple times, each time executing up to the ref9 part, except for the last execution where the whole code runs. I want the full code to be executed the first time itself.
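For context, this matches how on("value") behaves: the callback runs again every time the data under the ref changes, including changes made by the script's own update() calls, so each write re-triggers the whole chain. A minimal sketch of reading the data only once per input string instead, using once() (which does not re-fire on later writes), might look like the following; the function name fire_base_db_once is made up, and the rest mirrors the code above:
function fire_base_db_once() {
  var ref = firebase.database().ref();
  // once() delivers the current value a single time; later update() calls
  // elsewhere in the code will not re-run this callback.
  ref.once("value", function (snapshot) {
    var r = snapshot.val();
    var ham_db = r.hams.spam_words;
    var spam_db = r.spams.spam_words;
    inputstring(ham_db, spam_db); // continue the existing pipeline
  }, function (error) {
    console.log("Error: " + error.code);
  });
}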

Workaround for new Edge Indexeddb bug?

Microsoft released update KB4088776 in the past couple of days, and it has had a devastating effect on the performance of IndexedDB's openCursor.
The simple fiddle here shows the problem: with the update, the "retrieval" time is 40 seconds or more; prior to the update, it was around 1 second.
https://jsfiddle.net/L7q55ad6/23/
Relevant retrieval portion is here:
var _currentVer = 1;

function _openDatabase(fnSuccess) {
  var _custDb = window.indexedDB.open("MyDatabase", _currentVer);
  _custDb.onsuccess = function (event) {
    var db = event.target.result;
    fnSuccess(db);
  };
  _custDb.onerror = function (event) {
    _custDb = null;
    fnSuccess(null); // should use localData
  };
  _custDb.onupgradeneeded = function (event) {
    var db = event.target.result;
    var txn = event.target.transaction;
    // Create an objectStore for this database
    if (event.oldVersion < _currentVer) {
      var customer = db.createObjectStore("customer", { keyPath: "guid" });
      var index = customer.createIndex("by_id", "id", { unique: false });
    }
  };
}
function _retrieveCustomers(fn) {
  _openDatabase(function (db) {
    if (db == null) {
      alert("not supported");
      return;
    }
    var customers = [];
    var transaction = db.transaction("customer", "readonly");
    var objectStore = transaction.objectStore("customer");
    if (typeof objectStore.getAll === 'function') {
      console.log("using getAll");
      objectStore.getAll().onsuccess = function (event) {
        fn(event.target.result);
      };
    }
    else {
      console.log("using openCursor");
      objectStore.openCursor().onsuccess = function (event) {
        var cursor = event.target.result;
        if (cursor) {
          customers.push(cursor.value);
          cursor.continue();
        }
        else {
          fn(customers);
        }
      };
    }
  });
}
The time to create and add the customers is basically normal; only the retrieval is bad. Edge has never supported the getAll method, and it still doesn't after the update.
The only workaround I can think of would be to use localStorage instead, but unfortunately our data set is too large to fit into the 10MB limit. It is actually faster now to retrieve from our servers and convert the text to javascript objects, defeating the main purpose of indexeddb.
I don't have Edge so I can't test this, but does it happen with get too, or just openCursor? If get still performs well, you could store an index (in your example, the list of primary keys; in your real app, maybe something more complicated) in localStorage, and then use that to call get on each one.
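A rough sketch of that suggestion, assuming the list of customer guids is kept up to date in localStorage under a key such as "customerKeys" whenever records are added or removed (the key name and the helper name are invented for illustration):
function _retrieveCustomersByKey(fn) {
  _openDatabase(function (db) {
    if (db == null) { fn(null); return; }
    var keys = JSON.parse(localStorage.getItem("customerKeys") || "[]");
    var customers = [];
    var transaction = db.transaction("customer", "readonly");
    var objectStore = transaction.objectStore("customer");
    keys.forEach(function (key) {
      objectStore.get(key).onsuccess = function (event) {
        customers.push(event.target.result);
      };
    });
    // All the get() requests run in the same read-only transaction;
    // when it completes, every result has been collected.
    transaction.oncomplete = function () { fn(customers); };
  });
}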

Is there any better way to create a real time leaderboard for heavy traffic webpage?

I have made a leaderboard in a web app using data fetched from a MySQL database in PHP. The webpage has other functionality as well, with other SQL queries. The reference for the leaderboard was
what is the best practice leaderboard with php, mysql, memcached?
The question that worries me is whether there is a better approach to consider for making leaderboard apps.
Meanwhile I came across Firebase and was able to run the code below in a Fiddle:
var LEADERBOARD_SIZE = 6;
var scoreListRef = new Firebase('https://i62qrqslypp.firebaseio-demo.com//scoreList');
var htmlForPath = {};

function handleScoreAdded(scoreSnapshot, prevScoreName) {
  var newScoreRow = $("<tr/>");
  newScoreRow.append($("<td/>").append($("<em/>").text(scoreSnapshot.val().name)));
  newScoreRow.append($("<td/>").text(scoreSnapshot.val().score));
  htmlForPath[scoreSnapshot.key()] = newScoreRow;
  // Insert the new score in the appropriate place in the table.
  if (prevScoreName === null) {
    $("#leaderboardTable").append(newScoreRow);
  }
  else {
    var lowerScoreRow = htmlForPath[prevScoreName];
    lowerScoreRow.before(newScoreRow);
  }
}

function handleScoreRemoved(scoreSnapshot) {
  var removedScoreRow = htmlForPath[scoreSnapshot.key()];
  removedScoreRow.remove();
  delete htmlForPath[scoreSnapshot.key()];
}

var scoreListView = scoreListRef.limitToLast(LEADERBOARD_SIZE);
scoreListView.on('child_added', function (newScoreSnapshot, prevScoreName) {
  handleScoreAdded(newScoreSnapshot, prevScoreName);
});
scoreListView.on('child_removed', function (oldScoreSnapshot) {
  handleScoreRemoved(oldScoreSnapshot);
});

var changedCallback = function (scoreSnapshot, prevScoreName) {
  handleScoreRemoved(scoreSnapshot);
  handleScoreAdded(scoreSnapshot, prevScoreName);
};
scoreListView.on('child_moved', changedCallback);
scoreListView.on('child_changed', changedCallback);

$("#scoreInput").keypress(function (e) {
  if (e.keyCode == 13) {
    var newScore = Number($("#scoreInput").val());
    var name = $("#nameInput").val();
    $("#scoreInput").val("");
    if (name.length === 0)
      return;
    var userScoreRef = scoreListRef.child(name);
    userScoreRef.setWithPriority({ name: name, score: newScore }, newScore);
  }
});
Output Leaderboard:
I'm new to Firebase as well, so please guide me on this issue.
FIDDLE LINK

IndexedDB via Lawnchair gets larger with every save

I am using Lawnchair.js on a mobile app I am building at work, targeting iOS, Android, and Windows phones. My question: I have a relatively simple function (see below) that reads data from an object and saves it in the IndexedDB database. It's about 4MB of data, and on the first go-round, when I inspect in Internet Explorer (via Internet Options), I can see the database is about 7MB. If I reload the page and re-run the same function with the same data, it increases to 14MB and then 20MB. I'm using the same keys, so my understanding is that this should just update the records, but it's almost as if it's inserting all new records every time. I have also seen similar behavior using Lawnchair on Mobile Safari with the WebSQL adapter. Has anyone seen this before, or have any suggestions as to why this might be?
The following code is from a function I am using to populate the database.
populateDatabase: function(database, callback) {
  'use strict';
  var key;
  try {
    for (key in MasterData) {
      if (MasterData.hasOwnProperty(key)) {
        var itemInfo = DataConfig.checkForDataUpdates[DataConfig.keyMap[key]];
        database.save({key: itemInfo["name"], hash: itemInfo["version"], url: itemInfo["url"], data: MasterData[key]});
      }
    }
    callback(true);
  } catch(e) {
    callback(false);
  }
}
MasterData is the large data file, and itemInfo contains the key name, a hash that is later used to check an API for updates, and the relative URL to update from. After I create the database I pass it into this function, and then pass back true if the inserts are successful and false otherwise.
As previously mentioned, I have seen similar issues on iOS where calling database.save() was allocating a lot of memory but not releasing it, eventually causing a crash if it populated the database and then tried to update some records. Removing Lawnchair from the equation has kept it from crashing, but it still allocates a lot of memory when saving data. I'm not sure if this is normal for persistent storage on mobile devices, a bug in Lawnchair, or me being a noob and doing something terribly wrong, but I could use some pointers on this, as well as on why IndexedDB just keeps getting larger and larger on every save (at least during initial testing in IE10).
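One rough way to check whether each run overwrites or duplicates records is to count the entries in the object store between reloads; this is only a sketch, and the database name below is a placeholder for whatever name Lawnchair actually created (visible in the browser's storage inspector):
var req = window.indexedDB.open("my-lawnchair-db"); // hypothetical name
req.onsuccess = function (event) {
  var db = event.target.result;
  var storeName = db.objectStoreNames[0]; // assumes the store already exists
  db.transaction(storeName, "readonly")
    .objectStore(storeName)
    .count().onsuccess = function (e) {
      console.log("records in " + storeName + ": " + e.target.result);
    };
};
If the count stays constant across reloads, the saves are updating in place; if it grows by the number of keys each time, new records are being inserted.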
EDIT: Source Code for indexed-db adapter is here:
https://github.com/brianleroux/lawnchair/blob/master/src/adapters/indexed-db.js
and here is the code for the save function I am using:
save: function(obj, callback) {
  var self = this;
  if (!this.store) {
    this.waiting.push(function() {
      this.save(obj, callback);
    });
    return;
  }
  var objs = (this.isArray(obj) ? obj : [obj]).map(function(o) { if (!o.key) { o.key = self.uuid(); } return o; });
  var win = function (e) {
    if (callback) { self.lambda(callback).call(self, self.isArray(obj) ? objs : objs[0]); }
  };
  var trans = this.db.transaction(this.record, READ_WRITE);
  var store = trans.objectStore(this.record);
  for (var i = 0; i < objs.length; i++) {
    var o = objs[i];
    store.put(o, o.key);
  }
  store.transaction.oncomplete = win;
  store.transaction.onabort = fail;
  return this;
},
When creating a new instance, Lawnchair uses the init function from the indexed-db adapter, which is the following:
init: function(options, callback) {
  this.idb = getIDB();
  this.waiting = [];
  this.useAutoIncrement = useAutoIncrement();
  var request = this.idb.open(this.name, STORE_VERSION);
  var self = this;
  var cb = self.fn(self.name, callback);
  if (cb && typeof cb != 'function') throw 'callback not valid';
  var win = function() {
    // manually clean up event handlers on request; this helps on chrome
    request.onupgradeneeded = request.onsuccess = request.error = null;
    if (cb) return cb.call(self, self);
  };
  var upgrade = function(from, to) {
    // don't try to migrate dbs, just recreate
    try {
      self.db.deleteObjectStore('teststore'); // old adapter
    } catch (e1) { /* ignore */ }
    try {
      self.db.deleteObjectStore(self.record);
    } catch (e2) { /* ignore */ }
    // ok, create object store.
    var params = {};
    if (self.useAutoIncrement) { params.autoIncrement = true; }
    self.db.createObjectStore(self.record, params);
    self.store = true;
  };
  request.onupgradeneeded = function(event) {
    self.db = request.result;
    self.transaction = request.transaction;
    upgrade(event.oldVersion, event.newVersion);
    // will end up in onsuccess callback
  };
  request.onsuccess = function(event) {
    self.db = event.target.result;
    if (self.db.version != ('' + STORE_VERSION)) {
      // DEPRECATED API: modern implementations will fire the
      // upgradeneeded event instead.
      var oldVersion = self.db.version;
      var setVrequest = self.db.setVersion('' + STORE_VERSION);
      // onsuccess is the only place we can create Object Stores
      setVrequest.onsuccess = function(event) {
        var transaction = setVrequest.result;
        setVrequest.onsuccess = setVrequest.onerror = null;
        // can't upgrade w/o versionchange transaction.
        upgrade(oldVersion, STORE_VERSION);
        transaction.oncomplete = function() {
          for (var i = 0; i < self.waiting.length; i++) {
            self.waiting[i].call(self);
          }
          self.waiting = [];
          win();
        };
      };
      setVrequest.onerror = function(e) {
        setVrequest.onsuccess = setVrequest.onerror = null;
        console.error("Failed to create objectstore " + e);
        fail(e);
      };
    } else {
      self.store = true;
      for (var i = 0; i < self.waiting.length; i++) {
        self.waiting[i].call(self);
      }
      self.waiting = [];
      win();
    }
  };
  request.onerror = function(ev) {
    if (request.errorCode === getIDBDatabaseException().VERSION_ERR) {
      // xxx blow it away
      self.idb.deleteDatabase(self.name);
      // try it again.
      return self.init(options, callback);
    }
    console.error('Failed to open database');
  };
},
I think you keep adding data instead of updating the existing data.
Can you provide some more information about the configuration of the store? Are you using an inline or an external (out-of-line) key? If it's inline, what is the keyPath?
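To illustrate the distinction with a standalone sketch (not Lawnchair code; the database and store names are invented): with an inline key the key lives inside the stored object under keyPath, while with an out-of-line (external) key it is passed separately to put(). In both modes put() only overwrites when the same key is used on every save; if a fresh key (for example a newly generated uuid) is used each time, every save adds a new record and the database grows.
var openReq = indexedDB.open("keyDemo", 1); // hypothetical database
openReq.onupgradeneeded = function (event) {
  var db = event.target.result;
  // Inline key: the key is read from the stored object's "key" property.
  db.createObjectStore("inlineStore", { keyPath: "key" });
  // Out-of-line key: the key is supplied to put() directly.
  db.createObjectStore("externalStore");
};
openReq.onsuccess = function (event) {
  var db = event.target.result;
  var tx = db.transaction(["inlineStore", "externalStore"], "readwrite");
  // Same inline key both times -> one record; the second put() overwrites.
  tx.objectStore("inlineStore").put({ key: "masterData", data: "v1" });
  tx.objectStore("inlineStore").put({ key: "masterData", data: "v2" });
  // Same external key both times -> still one record.
  tx.objectStore("externalStore").put({ data: "v1" }, "masterData");
  tx.objectStore("externalStore").put({ data: "v2" }, "masterData");
  // A different key on each call (e.g. a fresh uuid) -> a new record per save.
  tx.objectStore("externalStore").put({ data: "v3" }, "some-new-uuid");
  tx.oncomplete = function () { db.close(); };
};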

How to deal with asynchronous javascript in loops?

I have a for loop like this:
for (var name in myperson.firstname){
  var myphone = new phone(myperson, firstname);
  myphone.get(function(phonenumbers){
    if (myphone.phonearray){
      myperson.save();
      // Can I put a break here?
    }
  });
}
What it does is that it searches for phone-numbers in a database based on various first-names. What I want to achieve is that once it finds a number associated with any of the first names, it performs myperson.save and then stops all the iterations, so that no duplicates get saved. Sometimes, none of the names return any phone-numbers.
myphone.get contains a server request, and the callback is triggered on success.
If I put a break inside the response, what will happen with the other iterations of the loop? Most likely the other HTTP requests have already been initiated, and I don't want them to perform the save. One solution I have thought of is to put a variable outside of the for loop, set it when saving, and then check it when the other callbacks get triggered, but I'm not sure if that's the best way to go.
You could write a helper function to restrict invocations:
function callUntilTrue(cb) {
  var done = false;
  return function () {
    if (done) {
      log("previous callback succeeded. not calling others.");
      return;
    }
    var res = cb.apply(null, arguments);
    done = !!res;
  };
}

var myperson = {
  firstname: {
    "tom": null,
    "jerry": null,
    "micky": null
  },
  save: function () {
    log("save " + JSON.stringify(this, null, 2));
  }
};

var cb = function (myperson_, phonenumbers) {
  if (myperson_.phonearray) {
    log("person already has phone numbers. returning.");
    return false;
  }
  if (phonenumbers.length < 1) {
    log("response has no phone numbers. returning.");
    return false;
  }
  log("person has no existing phone numbers. saving ", phonenumbers);
  myperson_.phonearray = phonenumbers;
  myperson_.save();
  return true;
};

var restrictedCb = callUntilTrue(cb.bind(null, myperson));

for (var name in myperson.firstname) {
  var myphone = new phone(myperson, name);
  myphone.get(restrictedCb);
}
Sample Console:
results for tom-0 after 1675 ms
response has no phone numbers. returning.
results for jerry-1 after 1943 ms
person has no existing phone numbers. saving , [
"jerry-1-0-number"
]
save {
"firstname": {
"tom": null,
"jerry": null,
"micky": null
},
"phonearray": [
"jerry-1-0-number"
]
}
results for micky-2 after 4440 ms
previous callback succeeded. not calling others.
Full example in this jsfiddle with fake timeouts.
EDIT: Added HTML output as well as console.log.
The first result callback will only ever happen after the loop, because of the single-threaded nature of javascript and because running code isn't interrupted if events arrive.
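A tiny standalone illustration of that point (not tied to the phone lookup code):
for (var i = 0; i < 3; i++) {
  setTimeout(function () { console.log("callback"); }, 0);
}
console.log("loop done");
// Logs "loop done" first and then "callback" three times: callbacks queued
// while the loop is running only execute after the current code has finished.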
If you still want requests to happen in parallel, you may use a flag:
var saved = false;
for (var name in myperson.firstname){
  var myphone = new phone(myperson, firstname /* name? */);
  myphone.get(function(phonenumbers){
    if (!saved && myphone.phonearray){
      saved = true;
      myperson.save();
    }
  });
}
This will not cancel any pending requests, however, just prevent the save once they return.
It would be better if your .get() would return something cancelable (the request itself, maybe).
var saved = false;
var requests = [];
for (var name in myperson.firstname){
  var myphone = new phone(myperson, firstname /* name? */);
  var r;
  requests.push(r = myphone.get(function(phonenumbers){
    // Remove current request.
    requests = requests.filter(function(i) {
      return r !== i;
    });
    if (saved || !myphone.phonearray) {
      return;
    }
    saved = true;
    // Kill other pending/unfinished requests.
    requests.forEach(function(r) {
      r.abort();
    });
    myperson.save();
  }));
}
Even better, don't start all requests at once. Instead construct an array of all possible combinations, have a counter (a semaphore) and only start X requests.
var saved = false;
var requests = [];
// Use requests.length as the implicit counter.
var waiting = []; // Wait queue.
for (var name in myperson.firstname){
  var myphone = new phone(myperson, firstname /* name? */);
  var r;
  if (requests.length >= 4) {
    // Put in wait queue instead.
    waiting.push(myphone);
    continue;
  }
  requests.push(r = myphone.get(function cb(phonenumbers){
    // Remove current request.
    requests = requests.filter(function(i) {
      return r !== i;
    });
    if (saved) {
      return;
    }
    if (!myphone.phonearray) {
      // Start next request.
      var w = waiting.shift();
      if (w) {
        requests.push(w.get(cb));
      }
      return;
    }
    saved = true;
    // Kill other pending/unfinished requests.
    requests.forEach(function(r) {
      r.abort();
    });
    myperson.save();
  }));
}
