firebase presence becomes more and more wrong over time - javascript

I have a simple presence user-count set up for Firebase, based on their example. The problem is that it relies on removing counts on disconnect. However, Firebase seems to go down every two months or so and loses the onDisconnect handlers, so over time the counts get more and more wrong. Is there any way to fix this?
ty.Presence = function() {
  this.rooms = {}
  this.presence = fb.child('presence')
  this.connectedRef = fb.child('.info/connected');
  if (!localStorage.fb_presence_id) {
    localStorage.fb_presence_id = Math.random().toString(36).slice(2)
  }
  this.browserID = localStorage.fb_presence_id
  var first = false
}
ty.Presence.prototype.add = function(roomID, userobj) {
  var self = this
  var userListRef = this.presence.child(roomID)
  // Generate a reference to a new location for my user with push.
  var obj = {
    s: "on",
    id: this.browserID
  }
  if (userobj) {
    obj.u = {
      _id: userobj._id,
      n: userobj.username
    }
    if (userobj.a) {
      obj.u.a = userobj.a
    }
  }
  var myUserRef = userListRef.push(obj)
  this.rooms[roomID] = myUserRef
  this.connectedRef.on("value", function(isOnline) {
    if (isOnline.val()) {
      // If we lose our internet connection, we want ourselves removed from the list.
      myUserRef.onDisconnect().remove();
    }
  });
};
ty.Presence.prototype.count = function(roomID, cb) {
  var self = this
  var userListRef = this.presence.child(roomID)
  var count = 0
  var users = {}
  function res() {
    var usersArr = _.pluck(users, 'id')
    usersArr = _.uniq(usersArr)
    count = usersArr.length
    if (cb) cb(count)
  }
  userListRef.on("child_added", function(css) {
    users[css.name()] = css.val();
    res()
  });
  userListRef.on("child_removed", function(css) {
    delete users[css.name()]
    res()
  });
  cb(count)
};
ty.Presence.prototype.get = function(ref) {
  return this[ref]
};
ty.Presence.prototype.setGlobal = function(object) {
  var self = this
  _.each(this.rooms, function(myUserRef) {
    myUserRef.set(object)
  })
};
ty.Presence.prototype.remove = function(roomID) {
  if (this.rooms[roomID])
    this.rooms[roomID].remove();
};
ty.Presence.prototype.off = function(roomID) {
  var userListRef = this.presence.child(roomID)
  userListRef.off()
};
ty.presence = new ty.Presence()
ty.presence.add('all')

The onDisconnect handlers can be lost if a Firebase is restarted (e.g. when a new release is pushed live). One simple approach is to attach a timestamp as a priority to the records when they are stored. As long as the client remains online, have the client update that timestamp occasionally.
setInterval(function() {
  // Refresh the priority on the presence record itself (myUserRef);
  // .info/connected is read-only, so the original connectedRef can't be written to.
  myUserRef.setPriority(Date.now());
}, 1000 * 60 * 60 * 4 /* every 4 hours */);
Thus, any record which reaches, say, 24 hours old is obviously an orphan. The cleanup could be performed by clients (e.g. when a new client receives the list for the first time) or by a server process (e.g. a node.js script with a setInterval() to check for records older than X).
// A Query has no remove() in the legacy API, so fetch the stale records and delete them one by one.
presenceRef.endAt(Date.now() - 24 * 60 * 60 * 1000 /* 24 hours ago */).once('value', function(snap) {
  snap.forEach(function(child) { child.ref().remove(); });
});
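For the server-side variant, a rough sketch (assuming a Node process with the legacy firebase client and the same presence path; the URL is a placeholder):
var Firebase = require('firebase');
var presenceRef = new Firebase('https://YOUR-APP.firebaseio.com/presence');
// Sweep once an hour for records whose priority (the timestamp above) is older than 24 hours.
setInterval(function() {
  presenceRef.endAt(Date.now() - 24 * 60 * 60 * 1000).once('value', function(snap) {
    snap.forEach(function(child) {
      child.ref().remove();
    });
  });
}, 1000 * 60 * 60);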
Less than ideal, sure, but a functional workaround I've utilized in apps.

Related

JavaScript - Issues recovering a map in an object after being saved in localStorage

I've been dealing with this for some time. I have a list of sections in which the user checks some checkboxes, and those are sent to the server via AJAX. However, since the user can return to previous sections, I'm using some objects of mine to store some things the user has done (whether he/she already finished working in that section, which checkboxes were checked, etc.). I'm doing this to avoid overloading the database and only send new requests to store information if the user actually changes a previous checkbox, not if he just starts clicking "Save" randomly. I'm using objects to represent the sections of the page, and storing the previous state of the checkboxes in a Map. Here's my "supervisor":
function Supervisor(id) {
  this.id = id;
  this.verif = null;
  this.selections = new Map();
  var children = $("#ContentPlaceHolder1_checkboxes_div_" + id).children().length;
  for (var i = 0; i < children; i++) {
    if (i % 2 == 0) {
      var checkbox = $("#ContentPlaceHolder1_checkboxes_div_" + id).children()[i];
      var idCheck = checkbox.id.split("_")[2];
      this.selections.set(idCheck, false);
    }
  }
  console.log("Length " + this.selections.size);
  this.change = false;
}
The console.log gives me the expected output, so I assume my Map is created and initialized correctly. Since the user's session can expire before he finishes his work, or he can close his browser by accident, I'm storing this object using local storage, so that I can restore the page according to what he has done should anything happen. Here are my functions:
function setObj(id, supervisor) {
  localStorage.setItem(id, JSON.stringify(supervisor));
}
function getObj(key) {
  var supervisor = JSON.parse(localStorage.getItem(key));
  return supervisor;
}
So I'm trying to add to the record whenever a user clicks a checkbox. And this is where the problem happens. Here's the function:
function checkboxClicked(idCbx) {
  var idSection = $("#ContentPlaceHolder1_hdnActualField").val();
  var supervisor = getObj(idSection);
  console.log(typeof (supervisor)); // Returns object, everything's fine
  console.log(typeof (supervisor.change)); // Returns boolean
  supervisor.change = true;
  var idCheck = idCbx.split("_")[2]; // I just want a part of the name
  console.log(typeof (supervisor.selections)); // Prints object
  console.log("Length " + supervisor.selections.size); // Undefined!
  supervisor.selections.set(idCheck, true); // Error! Note: the true is just for testing purposes
  setObj(idSection, supervisor);
}
What am I doing wrong? Thanks!
The problem is that JSON.stringify serializes a Map as an empty object, so your selections are silently dropped on the way into localStorage; what comes back from JSON.parse is a plain object with no size and no set method. You have to convert the Map to an array of entries before stringifying and rebuild it after parsing. Please look at this example (I removed the jQuery id discovery for clarity). You'll need to adapt it to your needs, but it should get you most of the way there.
const mapToJSON = (map) => [...map];
const mapFromJSON = (json) => new Map(json);

function Supervisor(id) {
  this.id = id;
  this.verif = null;
  this.selections = new Map();
  this.change = false;
  this.selections.set('blah', 'hello');
}

Supervisor.from = function(data) {
  const id = data.id;
  const supervisor = new Supervisor(id);
  supervisor.verif = data.verif;
  supervisor.selections = new Map(data.selections);
  return supervisor;
};

Supervisor.prototype.toJSON = function() {
  return {
    id: this.id,
    verif: this.verif,
    selections: mapToJSON(this.selections)
  };
};

const expected = new Supervisor(1);
console.log(expected);
const json = JSON.stringify(expected);
const actual = Supervisor.from(JSON.parse(json));
console.log(actual);
If you can't use the spread operator in mapToJSON, you could loop and push.
const mapToJSON = (map) => {
  const result = [];
  for (let entry of map.entries()) {
    result.push(entry);
  }
  return result;
}
Really the only thing I'd change is to have the constructor do less: just accept values and assign them with minimal fiddling, and have a factory query the DOM and populate the constructor with values, maybe something like fromDOM(). This will make Supervisor more flexible and easier to test.
function Supervisor(options) {
  this.id = options.id;
  this.verif = null;
  this.selections = options.selections || new Map();
  this.change = false;
}

Supervisor.fromDOM = function(id) {
  const selections = new Map();
  const children = $("#ContentPlaceHolder1_checkboxes_div_" + id).children();
  for (var i = 0; i < children.length; i++) {
    if (i % 2 == 0) {
      var checkbox = children[i];
      var idCheck = checkbox.id.split("_")[2];
      selections.set(idCheck, false);
    }
  }
  return new Supervisor({ id: id, selections: selections });
};

console.log(Supervisor.fromDOM(2));
You can keep going and have another method that tries to parse a Supervisor from localStorage and defaults to the DOM-based factory if the localStorage one returns null, as in the sketch below.
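A minimal sketch of that fallback, assuming the options-based constructor and fromDOM factory above (fromStorage is a hypothetical name):
Supervisor.fromStorage = function(id) {
  const data = JSON.parse(localStorage.getItem(id));
  // Nothing stored yet: fall back to building from the DOM.
  if (!data) {
    return Supervisor.fromDOM(id);
  }
  // Rebuild the Map from the saved array of entries.
  return new Supervisor({ id: data.id, selections: new Map(data.selections) });
};

// Usage: selections always comes back as a real Map.
// When saving, convert the Map to entries first (the toJSON approach above also works).
const supervisor = Supervisor.fromStorage(2);
supervisor.selections.set('someId', true);
localStorage.setItem(supervisor.id, JSON.stringify({
  id: supervisor.id,
  verif: supervisor.verif,
  selections: [...supervisor.selections]
}));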

web based firebase code running multiple times when executed

I am trying to update some data in my Firebase Realtime Database, but for some reason the code runs multiple times when I present it with a new string, which messes up the output.
Below is what I have tried. I tried to create a new function for the ref.update(), but the same thing happens again; I have pointed out in the comments of the code where exactly the code jumps back to.
function fire_base_db() {
  var ref = firebase.database().ref();
  ref.on("value", function(snapshot) {
    r = snapshot.val();
    var ham_db = r.hams.spam_words;
    var spam_db = r.spams.spam_words; // contains spam data now
    console.log('function 1')
    inputstring(ham_db, spam_db);
  }, function(error) {
    console.log("Error: " + error.code);
  });
}
inputstring(ham_db, spam_db); // just a random function I need
spam_prop(user_string_toknized, spam_db, ham_db); // yet another

function spam_or_ham() {
  var final_value = " ";
  if (total_spam_probablity < total_ham_probablity) {
    console.log("ham");
    final_value = "ham";
  } else {
    console.log("spam");
    final_value = "spam";
  }
  if (final_value === "spam") { // the original single "=" assigned instead of comparing
    var ref = firebase.database().ref("hams/spam_words/");
    ref.update(old_words_spam_2);
  } else if (final_value === "ham") {
    var ref2 = firebase.database().ref("spams/spam_words/")
    ref2.update(old_words_ham_2)
  }
  for (var a in new_words_spam) {
    new_words_spam[a] = 1 // the original indexed with the other loop's variable here
  }
  for (var b in new_words_ham) {
    new_words_ham[b] = 1;
  }
  if (final_value === "spam") {
    var ref9 = firebase.database().ref("spams/spam_words/")
    ref9.update(new_words_spam)
  } else if (final_value === "ham") {
    var ref2 = firebase.database().ref("hams/spam_words")
    ref2.update(new_words_ham)
  }
}

fire_base_db_upadt_new_words();
fire_base_db_upadt_new_words_2();
The first function, fire_base_db(), reads data from the database; the next two functions are just steps toward the output. The last function, spam_or_ham, is where the bug appears: the moment the code enters the if statement and reaches the ref9.update part, it jumps back to ref.on in the first function and runs multiple times, each time executing up to the ref9 part, except on the last run, where the whole thing completes. I want the full code to be executed the first time.
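A likely cause: ref.on("value", ...) registers a listener that fires again on every change to the data, including the writes spam_or_ham() itself makes, which produces exactly the jump-back behaviour described. A minimal sketch of reading once instead, assuming the same helper names:
function fire_base_db() {
  var ref = firebase.database().ref();
  // once() reads the value a single time, so later update() calls won't re-trigger this code.
  ref.once("value").then(function(snapshot) {
    var r = snapshot.val();
    inputstring(r.hams.spam_words, r.spams.spam_words);
  }).catch(function(error) {
    console.log("Error: " + error.code);
  });
}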

Workaround for new Edge Indexeddb bug?

Microsoft released update kb4088776 in the past couple of days, which has had a devastating effect on the performance of IndexedDB's openCursor.
The simple fiddle here shows the problem. With the update, the "retrieval" time is 40 seconds or more. Prior to the update, it is around 1 second.
https://jsfiddle.net/L7q55ad6/23/
Relevant retrieval portion is here:
var _currentVer = 1;

function _openDatabase(fnSuccess) {
  var _custDb = window.indexedDB.open("MyDatabase", _currentVer);
  _custDb.onsuccess = function (event) {
    var db = event.target.result;
    fnSuccess(db);
  }
  _custDb.onerror = function (event) {
    _custDb = null;
    fnSuccess(null); // should use localData
  }
  _custDb.onupgradeneeded = function (event) {
    var db = event.target.result;
    var txn = event.target.transaction;
    // Create an objectStore for this database
    if (event.oldVersion < _currentVer) {
      var customer = db.createObjectStore("customer", { keyPath: "guid" });
      var index = customer.createIndex("by_id", "id", { unique: false });
    }
  };
}
function _retrieveCustomers(fn) {
  _openDatabase(function (db) {
    if (db == null) {
      alert("not supported");
      return;
    }
    var customers = [];
    var transaction = db.transaction("customer", "readonly");
    var objectStore = transaction.objectStore("customer");
    if (typeof objectStore.getAll === 'function') {
      console.log("using getAll");
      objectStore.getAll().onsuccess = function (event) {
        fn(event.target.result);
      };
    } else {
      console.log("using openCursor");
      objectStore.openCursor().onsuccess = function (event) {
        var cursor = event.target.result;
        if (cursor) {
          customers.push(cursor.value);
          cursor.continue();
        } else {
          fn(customers);
        }
      };
    }
  });
}
The time to create and add the customers is basically normal; only the retrieval is bad. Edge has never supported the getAll method, and it still doesn't after the update.
The only workaround I can think of would be to use localStorage instead, but unfortunately our data set is too large to fit into the 10MB limit. It is actually faster now to retrieve from our servers and convert the text to JavaScript objects, defeating the main purpose of IndexedDB.
I don't have Edge so I can't test this, but does it happen with get too, or just openCursor? If get still performs well, you could store an index (in your example, the list of primary keys; in your real app, maybe something more complicated) in localStorage, and then use that to call get on each one.
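A minimal sketch of that idea, assuming get is still fast and the key list is kept under a hypothetical customer_keys entry in localStorage:
function _retrieveCustomersByKeys(fn) {
  _openDatabase(function (db) {
    var keys = JSON.parse(localStorage.getItem("customer_keys") || "[]");
    var objectStore = db.transaction("customer", "readonly").objectStore("customer");
    var customers = [];
    var remaining = keys.length;
    if (remaining === 0) return fn(customers);
    keys.forEach(function (key) {
      objectStore.get(key).onsuccess = function (event) {
        customers.push(event.target.result);
        if (--remaining === 0) fn(customers); // every get on this transaction has completed
      };
    });
  });
}
The key list would be written alongside the records at save time; note the results may arrive in a different order than the keys.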

Firebase: Run a query synchronously

I am trying to set some user data depending on the number of users already in my users collection. This even includes a userId, which should be a number.
exports.setUserData = functions.firestore.document('/users/{documentId}')
  .onCreate(event => {
    return admin.firestore().collection('users')
      .orderBy('userId', 'desc').limit(1)
      .get().then(function(snapshot) {
        const user = snapshot.docs[0].data();
        var lastUserId = user.userId;
        var userObject = {
          userId: lastUserId + 1 // ... some other fields here
        };
        event.data.ref.set(userObject, {
          merge: true
        });
      });
  });
One issue I noticed here: quickly adding two users results in those documents having the same userId, maybe because the get() query is asynchronous?
Is there a way to make this whole setUserData method synchronous?
There is no way to make Cloud Functions run your function invocations sequentially. That would also be quite contrary to the serverless promise of auto-scaling to demand.
But in your case there's a much simpler, lower-level primitive to get a sequential ID. You should store the last known user ID in the database and then use a transaction to read/update it.
var counterRef = admin.firestore().collection('counters').doc('userid');
return admin.firestore().runTransaction(function(transaction) {
  // This code may get re-run multiple times if there are conflicts.
  return transaction.get(counterRef).then(function(counterDoc) {
    // Guard against the counter doc not existing yet, and update with an object.
    var newValue = ((counterDoc.data() || {}).value || 0) + 1;
    transaction.update(counterRef, { value: newValue });
  });
});
Solution
var counterRef = admin.firestore().collection('counters').doc('userId');
return admin.firestore().runTransaction(function(transaction) {
  // This code may get re-run multiple times if there are conflicts.
  return transaction.get(counterRef).then(function(counterDoc) {
    var newValue = (counterDoc.data().value || 0) + 1;
    transaction.update(counterRef, {
      "value": newValue
    });
  });
}).then(t => {
  // Return this second transaction too, so the function waits for the write to finish.
  return admin.firestore().runTransaction(function(transaction) {
    return transaction.get(counterRef).then(function(counterDoc) {
      var userIdCounter = counterDoc.data().value || 0;
      var userObject = {
        userId: userIdCounter
      };
      return event.data.ref.set(userObject, {
        merge: true
      });
    });
  });
});
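One transaction would actually do here: runTransaction resolves with whatever the update function returns, so the counter bump can hand the new value straight to the set. A sketch under the same assumptions (counterRef and event as above):
return admin.firestore().runTransaction(function(transaction) {
  return transaction.get(counterRef).then(function(counterDoc) {
    var newValue = (counterDoc.data().value || 0) + 1;
    transaction.update(counterRef, { value: newValue });
    return newValue; // becomes the value runTransaction resolves with
  });
}).then(function(newValue) {
  return event.data.ref.set({ userId: newValue }, { merge: true });
});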

IndexedDB via Lawnchair gets larger with every save

I am using Lawnchair.js on a mobile app I am building at work, targeting iOS, Android, and Windows phones. My question: I have a relatively simple function (see below) that reads data from an object and saves it in the IndexedDB database. It's about 4MB of data, and on the first go-round, when I inspect in Internet Explorer (via Internet Options), I can see the database is about 7MB. If I reload the page and re-run the same function with the same data, it increases to 14MB and then 20MB. I'm using the same keys, so my understanding is that this should just update the records, but it's almost as if it's inserting all new records every time. I have also seen similar behavior using Lawnchair on mobile Safari with the websql adapter. Has anyone seen this before, or have any suggestions as to why this might be?
The following code is from a function I am using to populate the database.
populateDatabase: function(database, callback) {
  'use strict';
  var key;
  try {
    for (key in MasterData) {
      if (MasterData.hasOwnProperty(key)) {
        var itemInfo = DataConfig.checkForDataUpdates[DataConfig.keyMap[key]];
        database.save({ key: itemInfo["name"], hash: itemInfo["version"], url: itemInfo["url"], data: MasterData[key] });
      }
    }
    callback(true);
  } catch (e) {
    callback(false);
  }
}
MasterData is the large data file, and itemInfo contains the key name, a hash that is later used to check an API for updates, and the relative URL of where to update from. After I create the database I pass it into this function, which passes back true if the inserts are successful and false otherwise.
As previously mentioned, I have seen similar issues in iOS where calling database.save() was allocating a lot of memory but not releasing it, eventually causing a crash if it populated the database and then tried to update some records. Removing Lawnchair from the equation has kept it from crashing, but it still allocates a lot of memory when saving data. Not sure if this is normal for persistent storage on mobile devices, a bug in Lawnchair, or me being a noob and doing something terribly wrong, but I could use some pointers on this, as well as why IndexedDB just keeps getting larger and larger on every save (at least during initial testing in IE10).
EDIT: Source code for the indexed-db adapter is here:
https://github.com/brianleroux/lawnchair/blob/master/src/adapters/indexed-db.js
and here is the code for the save function I am using:
save: function(obj, callback) {
  var self = this;
  if (!this.store) {
    this.waiting.push(function() {
      this.save(obj, callback);
    });
    return;
  }
  var objs = (this.isArray(obj) ? obj : [obj]).map(function(o) {
    if (!o.key) { o.key = self.uuid() }
    return o
  })
  var win = function(e) {
    if (callback) { self.lambda(callback).call(self, self.isArray(obj) ? objs : objs[0]) }
  };
  var trans = this.db.transaction(this.record, READ_WRITE);
  var store = trans.objectStore(this.record);
  for (var i = 0; i < objs.length; i++) {
    var o = objs[i];
    store.put(o, o.key);
  }
  store.transaction.oncomplete = win;
  store.transaction.onabort = fail;
  return this;
},
When creating a new instance, Lawnchair uses the init function from the indexed-db adapter, which is the following:
init: function(options, callback) {
  this.idb = getIDB();
  this.waiting = [];
  this.useAutoIncrement = useAutoIncrement();
  var request = this.idb.open(this.name, STORE_VERSION);
  var self = this;
  var cb = self.fn(self.name, callback);
  if (cb && typeof cb != 'function') throw 'callback not valid';
  var win = function() {
    // manually clean up event handlers on request; this helps on chrome
    request.onupgradeneeded = request.onsuccess = request.error = null;
    if (cb) return cb.call(self, self);
  };
  var upgrade = function(from, to) {
    // don't try to migrate dbs, just recreate
    try {
      self.db.deleteObjectStore('teststore'); // old adapter
    } catch (e1) { /* ignore */ }
    try {
      self.db.deleteObjectStore(self.record);
    } catch (e2) { /* ignore */ }
    // ok, create object store.
    var params = {};
    if (self.useAutoIncrement) { params.autoIncrement = true; }
    self.db.createObjectStore(self.record, params);
    self.store = true;
  };
  request.onupgradeneeded = function(event) {
    self.db = request.result;
    self.transaction = request.transaction;
    upgrade(event.oldVersion, event.newVersion);
    // will end up in onsuccess callback
  };
  request.onsuccess = function(event) {
    self.db = event.target.result;
    if (self.db.version != ('' + STORE_VERSION)) {
      // DEPRECATED API: modern implementations will fire the
      // upgradeneeded event instead.
      var oldVersion = self.db.version;
      var setVrequest = self.db.setVersion('' + STORE_VERSION);
      // onsuccess is the only place we can create Object Stores
      setVrequest.onsuccess = function(event) {
        var transaction = setVrequest.result;
        setVrequest.onsuccess = setVrequest.onerror = null;
        // can't upgrade w/o versionchange transaction.
        upgrade(oldVersion, STORE_VERSION);
        transaction.oncomplete = function() {
          for (var i = 0; i < self.waiting.length; i++) {
            self.waiting[i].call(self);
          }
          self.waiting = [];
          win();
        };
      };
      setVrequest.onerror = function(e) {
        setVrequest.onsuccess = setVrequest.onerror = null;
        console.error("Failed to create objectstore " + e);
        fail(e);
      };
    } else {
      self.store = true;
      for (var i = 0; i < self.waiting.length; i++) {
        self.waiting[i].call(self);
      }
      self.waiting = [];
      win();
    }
  };
  request.onerror = function(ev) {
    if (request.errorCode === getIDBDatabaseException().VERSION_ERR) {
      // xxx blow it away
      self.idb.deleteDatabase(self.name);
      // try it again.
      return self.init(options, callback);
    }
    console.error('Failed to open database');
  };
},
I think you keep adding data instead of updating the existing records.
Can you provide some more information about the configuration of the store? Are you using an inline or an external key? If it's inline, what is the keyPath? A quick check like the sketch below will tell you both, and whether the record count actually grows.
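A minimal sketch for inspecting the store from the console (assuming the default Lawnchair database and store names; adjust to whatever your app uses):
var req = indexedDB.open("lawnchair");
req.onsuccess = function (e) {
  var db = e.target.result;
  var name = db.objectStoreNames[0];
  var store = db.transaction(name).objectStore(name);
  console.log("keyPath:", store.keyPath, "autoIncrement:", store.autoIncrement);
  // If this number grows on every reload, records are being added, not updated.
  store.count().onsuccess = function (ev) {
    console.log("records:", ev.target.result);
  };
};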
