Check and Increment the Version Number of an IndexedDB

I have an IndexedDB that is storing a large amount of dynamic data. (The static data is already cached by a Service Worker)
My problem is that because this data is dynamic, I need the IndexedDB to be cleared and restored each time the application is opened. For this, I need the version number to be incremented so that the onupgradeneeded event fires. I can't think of any logical way to do this, and even using the following call in the onupgradeneeded event I get undefined.
e.target.result.oldversion
My IndexedDB code is as follows, with the parameters being:
Key - The name of the JSON object to store in the database.
Value - The JSON object itself.
function dbInit(key, value) {
    // Open (or create) the database
    var open = indexedDB.open("MyDatabase", 1);
    console.log(value);
    // Create the schema
    open.onupgradeneeded = function(e) {
        console.log("Old Version: " + e.target.result.oldversion); //Undefined
        console.log("New Version: " + e.target.result.newversion); //Undefined
        var db = open.result;
        var store = db.createObjectStore("Inspections", {keyPath: "id", autoIncrement: true});
        var index = store.createIndex(key, key);
    };
    open.onsuccess = function() {
        // Start a new transaction
        var db = open.result;
        var tx = db.transaction("Inspections", "readwrite");
        var store = tx.objectStore("Inspections");
        var index = store.index(key);
        store.add(value);
        // Close the db when the transaction is done
        tx.oncomplete = function() {
            db.close();
        };
    };
}
As this method is called several times for several 'Key' objects, I will need to work out a way to increment this version number once per opening of the page, and then move the 'add' call outside of the onupgradeneeded handler. For the moment, though, the priority is making sure it runs through once: incrementing the version number, firing onupgradeneeded, deleting the current data, and storing the new data.
Thanks in advance!

The oldVersion and newVersion properties (note the capitalization) are on the IDBVersionChangeEvent, not on the IDBDatabase. In other words, this:
console.log("Old Version: " + e.target.result.oldversion); //Undefined
console.log("New Version: " + e.target.result.newversion); //Undefined
should be:
console.log("Old Version: " + e.oldVersion);
console.log("New Version: " + e.newVersion);
Now that said... you're using the schema versioning in a somewhat atypical way. If you really want to start with a fresh database each time the page is opened, just delete before opening:
indexedDB.deleteDatabase("MyDatabase");
var open = indexedDB.open("MyDatabase", 1);
// Open/delete requests are always processed in the order they are
// made, so the open will wait for the delete to run.
Note that the queued operations (delete and open) would then be blocked if another tab was holding an open connection and didn't respond to a versionchange event sent out in response to the delete request. Maybe that's a good thing in your case - it would prevent two tabs from partying on the database simultaneously.
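If you want to handle that explicitly, the delete request fires a blocked event, and connections in other tabs receive a versionchange event they can respond to by closing; here's a minimal sketch (the log message is just illustrative):
var deleteRequest = indexedDB.deleteDatabase("MyDatabase");
deleteRequest.onblocked = function() {
    // another tab still holds an open connection and hasn't closed it yet
    console.log("Delete blocked; waiting for other tabs to close");
};

var open = indexedDB.open("MyDatabase", 1);
open.onsuccess = function() {
    var db = open.result;
    // close this tab's connection when another tab deletes/upgrades the
    // database, so that tab's request isn't blocked indefinitely
    db.onversionchange = function() {
        db.close();
    };
};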
A more typical usage pattern would be to only change the version when the web app is upgraded and the database schema is different. If you did need to wipe the data across sessions you'd do that on open, rather than on upgrade, and use things like clear() on the object store. But now we're getting into the design of your app, which it sounds like you've got a good handle on.
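For completeness, a minimal sketch of that clear-on-open approach, reusing the database and store names from the question:
var open = indexedDB.open("MyDatabase", 1); // version stays fixed
open.onupgradeneeded = function(e) {
    // runs only on first creation (or a genuine schema change)
    e.target.result.createObjectStore("Inspections", {keyPath: "id", autoIncrement: true});
};
open.onsuccess = function() {
    var db = open.result;
    var tx = db.transaction("Inspections", "readwrite");
    tx.objectStore("Inspections").clear(); // wipe stale data each session
    // ...then add the fresh data within this same transaction
    tx.oncomplete = function() {
        db.close();
    };
};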

Related

Good practice for indexedDB add then get

I have the code below that inserts a todo object in an indexedDB object store - and then gets a copy of the stored object (see further down) and this works fine.
I'm concerned that I am reusing a transaction that might be unsafe to use - since the transaction has already succeeded.
Should I create another transaction for the get - or is this unnecessary?
// this is only called after the 'tasks' object store has been opened
const todoIDB = requestIDB.result
const transaction = todoIDB.transaction(["tasks"], "readwrite")
const todoStore = transaction.objectStore("tasks")
const addRequest = todoStore.add({text: txt_val})
addRequest.addEventListener("success", () => {
    console.log("Added " + "#" + addRequest.result + ": " + txt_val)
    // should I add a new transaction, etc. here?
    const getRequest = todoStore.get(addRequest.result)
    getRequest.addEventListener("success", () => {
        console.log("Found " + JSON.stringify(getRequest.result))
    })
})
Here is some (valid) output (from Chrome):
Added #18: aaa
Found {"text":"aaa","id":18}
Added #19: bbb
Found {"text":"bbb","id":19}
Transactions can span multiple requests, so this is fine. (Of course, if the add request fails - e.g. the record already exists - then "success" won't fire and the get request won't happen.)
And to clarify a point - when you're observing the "success" event, it's the request that has succeeded, not the transaction. A "complete" or "abort" event will fire at the transaction object when the overall transaction has finished, i.e. when all of the individual requests have succeeded or one has failed and caused the transaction to fail.
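If you also want to observe the transaction-level outcome, you can attach listeners to the transaction object from the snippet above; a minimal sketch:
transaction.addEventListener("complete", () => {
    // every request succeeded and the changes are committed
    console.log("Transaction complete")
})
transaction.addEventListener("abort", () => {
    // a request failed (or abort() was called) and the changes were rolled back
    console.log("Transaction aborted: " + transaction.error)
})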

update firebase entries on an interval

I want to be able to update certain values in a firebase entry on a minute interval. I am not sure how to do this but have tried the following:
var interval = setInterval(function(){
    database.ref().on('value', function(snapshot){
        snapshot.forEach(function(childSnapshot) {
            var item = childSnapshot.val();
            item.key = childSnapshot.key;
            database.ref(item.key).update({
                trainTimeLeft: moment(item.trainFrequency).subtract(moment(moment(Date.now())).diff(item.trainTimeStamp, "minutes"), "minutes")
            });
        });
    });
}, 60000);
I am using moment.js for time differences (I am trying to make an updating train schedule). All of the entries are on the main branch of firebase.
Use setInterval() to trigger your function every X milliseconds.
Use firebase.database().ref() to access the database node you want to update.
Use update() to update the database node's value.
Use firebase.database.ServerValue.TIMESTAMP to get the server time. Using the client timestamp is not safe due to different timezones.
With this you can write your function. It should look something like this:
const serverTime = firebase.database.ServerValue.TIMESTAMP; // server-side placeholder, resolved on write
setInterval(() => {
    database.ref('myNode').once('value', (snapshot) => {
        const updatedDate = snapshot.val() && snapshot.val().storageTime ? snapshot.val().storageTime - serverTime : serverTime;
        snapshot.ref.update({ storageTime: updatedDate });
    });
}, 5000);
Consider this code just as an example, not a copy/paste solution.
Hope it helps.
There is no sense in polling the database; rather, tell it you want to listen for value (change) events.
From Read and Write Data on the Web
Important: The value event is called every time data is changed at the specified database reference, including changes to children. To limit the size of your snapshots, attach only at the lowest level needed for watching changes. For example, attaching a listener to the root of your database is not recommended. The following example demonstrates a social blogging application retrieving the star count of a post from the database:
var starCountRef = firebase.database().ref('posts/' + postId + '/starCount');
starCountRef.on('value', function(snapshot) {
    updateStarCount(postElement, snapshot.val());
});

Functional Javascript BaconJS, how can I push more values to an event stream?

I'm attempting to create a stack of AJAX responses in BaconJS that processes them in a first-in, first-out fashion, where each 'out' event waits for user input.
This is where I'm at now: Live JSBin
var pages = Bacon.fromArray([1,2,3])
var next = $("#next").asEventStream('click').map(true);
pages.flatMapConcat(asyncFunction).zip(next).log("responses")
function asyncFunction(page) {
    // Simulating something like an AJAX request
    return Bacon.later(1000 + (Math.random() * 3000), "Page " + page)
}
Currently this synchronously outputs an event from the pages EventStream each time #next is clicked, which is the behavior I want.
However, I am unable to figure out how to push more values to the pages EventStream. I have attempted to replace the pages EventStream with a Bus, pushing values like this (which doesn't work).
var pages = new Bacon.Bus()
pages.push("value")
How do I push more values to an EventStream?
I know this is an OLD post, but a Bus would work. Just push the numbers (you had an array of numbers before) into it:
var pages = new Bacon.Bus();
// Bacon.fromArray([1,2,3])
var next = $("#next").asEventStream('click').map(true);
pages.flatMapConcat(asyncFunction).zip(next).log("responses")
function asyncFunction(page) {
    // Simulating something like an AJAX request
    return Bacon.later(1000 + (Math.random() * 3000), "Page " + page)
}
pages.push(1);
pages.push(2);
pages.push(3);
I cloned your JSBin and changed it to use a Bus.
As mentioned previously, you could also stream the source of the page values using something like Bacon.fromEvent or Bacon.fromBinder.
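For example, here is a rough fromBinder sketch; the sink callback and the returned unsubscribe function follow Bacon's fromBinder contract, while nextPageNumber is a hypothetical producer of page values:
var pages = Bacon.fromBinder(function(sink) {
    // call sink(value) whenever a new page number becomes available
    var timer = setInterval(function() {
        sink(nextPageNumber()); // hypothetical source of page values
    }, 1000);
    // return an unsubscribe function for when the stream is disposed
    return function() {
        clearInterval(timer);
    };
});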

Firebase promise returns before collection is updated

I'm using AngularFire. I have some code which is supposed to add a new record to an array of records and, using the promise's then function, re-evaluate the array to find out which record has the most recent datestamp in the collection.
this.addRecord = function() {
    // Add a new record with the value the user has typed in
    $scope.records.$add({
        "date": (new Date()).toString(),
        "value": $scope.newValue
    }).then(function( ref ) {
        // Use underscore's _.max to determine which record is newest
        var _newestValue = _.max( $scope.records, function(record) {
            return record.date;
        }).value;
        sync.$update({ 'newestValue': _newestValue });
    });
    // Have angular clear the field
    $scope.newValue = '';
}
The problem is that when the promise.then() calls, my local copy of $scope.records is not yet updated with the newest record. So while firebase out on the server now has the new record, when I iterate on $scope.records I get all the records except for the one I just added. After the .then() completes, I can see that the record has been added.
Maybe I'm using the promise wrong? I was under the impression that when AngularFire finally calls the .then() that it would be after angular had added the new record on the server and synced up the local collection.
What's the right way to do this? I just need to reliably know when the record has been added locally. Thanks in advance!
So it turns out using model.$watch was the right way to go. $watch only fires when synced changes are made to the model, so you know you can reliably count on them.
var watchCallback = function() {
    if ($scope.loadingModel) return; // Block updates during load
    var _newestValue = _.max( $scope.records, function(record) {
        return record.date;
    }).value;
    sync.$update({ 'newestValue': _newestValue });
};
$scope.records.$watch( watchCallback );
AngularFire's model.$add().then() will fire when the changes were sent to the server, not when the local model is synced up to the client. So $add.then would be more appropriately used to confirm that changes were saved, or something along those lines.
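A sketch of that confirmation-style usage might look like this (the log message is illustrative, and ref.key() assumes the Firebase 2.x-era API used in the question):
$scope.records.$add({
    "date": (new Date()).toString(),
    "value": $scope.newValue
}).then(function(ref) {
    // resolves once the write has reached the server: a save confirmation,
    // not a signal that the local $scope.records array has caught up
    console.log("Saved record " + ref.key());
});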

Self-triggered perpetually running Firebase process using NodeJS

I have a set of records that I would like to update sequentially in perpetuity. Basically:
Get least recently updated record
Update record
Set date of record to now (aka. send it to the back of the list)
Back to step 1
Here is what I was thinking using Firebase:
// update record function
var updateRecord = function() {
    // get least recently updated record
    firebaseOOO.limit(1).once('value', function(snapshot) {
        var key = _.keys(snapshot.val())[0];
        /*
         * do 1-5 seconds of non-Firebase processing here
         */
        snapshot.ref().child(key).transaction(
            // update record
            function(data) {
                return updatedData;
            },
            // update priority after commit (would like to do it in transaction)
            function(error, committed, snap2) {
                snap2.ref().setPriority(snap2.val().dateUpdated);
            }
        );
    });
};
// listen whenever priority changes (aka. new item needs processing)
firebaseOOO.on('child_moved', function(snapshot) {
    updateRecord();
});
// kick off the whole thing
updateRecord();
Is this a reasonable thing to do?
In general, this type of daemon is precisely what was envisioned for use with the Firebase NodeJS client. So, the approach looks good.
However, in the on() call it looks like you're dropping the snapshot that's being passed in on the floor. This might be application specific to what you're doing, but it would be more efficient to consume that snapshot in relation to the once() that happens in the updateRecord().
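For example (a sketch only, assuming the moved child's snapshot carries everything the processing step needs), you could hand that snapshot straight to updateRecord instead of re-querying:
// consume the snapshot that child_moved already delivers,
// instead of issuing another once('value') query
firebaseOOO.on('child_moved', function(snapshot) {
    updateRecord(snapshot);
});

function updateRecord(childSnapshot) {
    /*
     * do the 1-5 seconds of non-Firebase processing on childSnapshot.val() here
     */
    childSnapshot.ref().transaction(function(data) {
        return updatedData; // result of the processing step, as in the question
    });
}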
