I am using jQuery promises to handle opening an IndexedDB database to store files, and then reading and writing files to it (I am using these for the first time). Since any of the functions can be called in any order, I always call the function to open the DB before attempting the operation. The code is below:
var DatabaseSingleton = (function () {
// Single shared deferred, resolved with the db handle once opened.
var openDbPromise = $.Deferred();
// NOTE(review): every call issues a fresh indexedDB.open() even though
// the same deferred is reused — hence 'db opened' logs on every call.
var openDb = function() {
var db;
var request = window.indexedDB.open("myDb", 2);
request.onerror = function(event) {
console.error('Error opening indexedDB connection.');
openDbPromise.reject();
}
request.onsuccess = function(event) {
console.log('db opened', request.result);
db = request.result;
// Surface errors from any later transaction on this connection.
db.onerror = function(event) {
console.error("Database error: " + event.target.error.name);
};
openDbPromise.resolve(db);
}
// Runs only when the version bumps (or on first creation).
request.onupgradeneeded = function(event) {
console.log('upgrading the idb', event.target.result);
db = event.target.result;
// create a store for the files, keyed and indexed by file name
db.createObjectStore("fileInfo", { keyPath: "name" }).createIndex("name", "name", { unique: false });
};
return openDbPromise.promise();
};
return {
// retrieve a list of all files in the DB
getFilesList: function() {
var filesPromise = $.Deferred();
openDb().then(function(db) {
...
});
return filesPromise.promise();
},
// retrieve metainfo of the file specified by its fileName
getFileinfo: function (fileName) {
var getInfoPromise = $.Deferred();
openDb().then(function(db) {
...
});
return getInfoPromise.promise();
},
}) ();
However with this I notice the 'db opened' being displayed every time any of the functions are called. Is there a better way to make sure that it is only being opened once and then just resolved for succeeding calls?
As it stands, var request = window.indexedDB.open("myDb", 2); etc is executed unconditionally every time openDb() is called.
The most straightforward approach is to introduce an if(...){} clause, to ensure that var request = window.indexedDB.open("myDb", 2); etc is only executed when a successful request (and hence db) has not been established (or is not in the process of being established).
Try this :
var DatabaseSingleton = (function () {
// Memoized deferred: undefined until the first openDb() call.
var openDbDeferred;
var openDb = function() {
// Only open when no attempt has been made yet or the last one failed.
// NOTE(review): deferred.isRejected() was removed in jQuery 1.8;
// on modern jQuery use openDbDeferred.state() === "rejected".
if(!openDbDeferred || openDbDeferred.isRejected()) {
openDbDeferred = $.Deferred();
var db;
var request = window.indexedDB.open("myDb", 2);
request.onsuccess = function(event) {
console.log('db opened', request.result);
db = request.result;
// Surface errors from any later transaction on this connection.
db.onerror = function(event) {
console.error("Database error: " + event.target.error.name);
};
openDbDeferred.resolve(db);
};
request.onerror = function(event) {
console.error('Error opening indexedDB connection.');
openDbDeferred.reject();
};
// Runs only when the version bumps (or on first creation).
request.onupgradeneeded = function(event) {
console.log('upgrading the idb', event.target.result);
db = event.target.result;
// create a store for the files
db.createObjectStore("fileInfo", { keyPath: "name" }).createIndex("name", "name", { unique: false });
};
}
// Subsequent calls return the same (already settled) promise.
return openDbDeferred.promise();
};
return {
// retrieve a list of all files in the DB
getFilesList: function() {
return openDb().then(function(db) {
...
});
},
// retrieve metainfo of the file specified by its fileName
getFileinfo: function(fileName) {
return openDb().then(function(db) {
...
});
}
};
}) ();
If you don't want openDb() to keep trying after previous failure, then change :
if(!openDbDeferred || openDbDeferred.isRejected()) {
to :
if(!openDbDeferred) {
I've written a small jquery plugin (still very alpha) that does this:
https://github.com/ameyms/jquery-indexeddb
And I've tried to keep the API super simple:
// Define and initialize an IndexedDB ...
var db = $.idb({
name:'foobar',
version: 2,
drop: stores_to_be_deleted,
stores:list_of_stores
});
// ... Add objects to a store
db.put(items, 'into_store').done(onsuccess);
// ... And delete objects from a store
db.remove('from_store', conditionFunc).done(onremoval);
// ... And not to forget fetching objects from a store
db.select('from_my_store', conditionFunc).done(function (items){
console.log(items)
});
Hope you like it!
The design is not robust in practice, because you cannot reliably use the database connection. It can be blocked at any time by another connection. Without closing the connection, you will occasionally find that a promise never resolves due to a race condition among connections. On the other hand, if you close the connection, how will the consumer know the connection is closed?
Related
I used this post: IndexedDB: upgrade with promises?
And implemented the part here: https://stackoverflow.com/a/25565755/15778635
This works for what I need. the part I am having trouble with is this:
// Opens dbName at the given version and runs `migration` inside the
// versionchange transaction — but only when the "syncUP" store is empty;
// otherwise the upgrade is rejected so pending sync records are not lost.
// Resolves once the database has been upgraded and closed; rejects on
// open errors or when syncUP still holds records.
var newMigrationPromise = function (dbName, version, migration) {
    return newPromise(function (deferred) {
        var request = indexedDB.open(dbName, version);
        // NB: caller must ensure upgrade callback always called
        request.onupgradeneeded = function (event) {
            var db = request.result;
            newTransactionPromise(
                function () {
                    // Fix: use the versionchange transaction hanging off the
                    // open request — a bare `transaction` identifier was
                    // undefined in this scope.
                    var syncUPStore = request.transaction.objectStore("syncUP");
                    var syncCountRequest = syncUPStore.count();
                    // Fix: IDBRequest fires a "success" event; "complete"
                    // exists only on IDBTransaction, so the original
                    // oncomplete handler never ran.
                    syncCountRequest.onsuccess = function (event) {
                        if (syncCountRequest.result > 0)
                            deferred.reject(syncCountRequest.result + " SyncUp Records exist, database upgrade aborted, keeping at current version.");
                        else {
                            // Good, continue with update
                            migration(db, request.transaction);
                            return request.transaction;
                        }
                    };
                })
                .then(function () { db.close(); })
                .then(deferred.resolve, deferred.reject);
        };
        request.onerror = function (ev) { deferred.reject(request.error); };
    });
};
I have a syncUP object store that has data that needs to be sent to the server when the user goes online. In this particular case the service worker is installing (because they came online and a change was put on the server) and needs to know if syncUP records exist prior to allowing the service worker to update. If they do exist then it needs to abort the install until it is empty.
The service worker abort works fine, and the database aborting upgrade works fine if I were to throw an error where var syncCountRequest = syncUPStore.count(); is.
My question:
How can I check if there are records in the "syncUP" object store and still use the implementation I mentioned above? I had considered moving the logic to another method, but I found I was having the same issue of not knowing how to handle the reject/resolve. My Promises knowledge is ok, but not good enough yet to figure it out on my own.
a rushed example:
var request = indexedDb.open(...);
request.onupgradeneeded = function(event) {
if(conditionShouldDoMigrationFromVersionXToNowIsTrue) {
migrate(event.transaction);
}
};
// Bumps the `x` counter on every record in store 'x' and writes each
// record back, all within the supplied versionchange transaction.
function migrate(versionChangeTransaction) {
    var store = versionChangeTransaction.objectStore('x');
    var readRequest = store.getAll();
    readRequest.onsuccess = function (event) {
        var records = event.target.result;
        records.forEach(function (record) {
            // do some mutation to the object
            record.x += 1;
            // write it back
            store.put(record);
        });
    };
}
My website has a JSON file containing data that is intended to be transferred into my users' local IndexedDB. I'm looking for a way to only load this JSON file when an update is actually needed.
To clarify, I plan for my website to run almost entirely off of my users' locally stored data whenever possible, similar to an app. They should only have to download the JSON file when a new IDB update is available.
So far, I've tried accomplishing this by running the onUpgradeNeeded event as an async function.
if (!window.indexedDB) {
window.alert("Your browser doesn't support a stable version of IndexedDB, which is required for most functions of this website. For the best support, please use the latest version of Chrome, Firefox, or Safari.");
}
else {
var dbVer = 39; //IDB Version (int only)
var recipeObject; //instantiate global variable for module object import
var recipeArray = []; //instantiate global array for module import
var recipeDBver; //instantiate global variable for actual database version (TODO: implement version checking)
var upgradeNeeded = false;
var clearNeeded = false;
var openRequest = indexedDB.open('recipeDatabase', dbVer);
console.log("IDB.js running");
openRequest.onsuccess = function(e) {
console.log('Running onSuccess');
};
openRequest.onerror = function(e) {
console.log('Open Request ERROR');
console.dir(e);
};
// NOTE(review): marking this handler `async` is the root problem —
// the `await import(...)` yields control, the versionchange
// transaction completes, and onsuccess fires before the upgrade work
// below runs. Upgrade handlers must do their store/index work
// synchronously in the same event-loop tick.
openRequest.onupgradeneeded = async function(e) {
var db = e.target.result;
console.log('Running onUpgradeNeeded');
db.onerror = function(errorEvent) {
console.log("onUpgradeNeeded ERROR");
return;
};
// NOTE(review): `importObject` is assigned without var/let — an
// implicit global.
importObject = await import("/resources/recipeDB.js");
//TODO: remove debugging
console.log('Module loaded');
console.log(importObject);
recipeObject = importObject.default;
console.log(recipeObject);
recipeDBver = recipeObject.recipeDBver;
console.log(recipeDBver);
recipeArray = recipeObject.recipeArray;
console.log(recipeArray);
upgradeNeeded = true;
// First-time setup: create the 'drinks' store keyed by id, plus its
// lookup indices; if the store already exists it only needs clearing.
if (!db.objectStoreNames.contains('drinks')) {
var storeOS = db.createObjectStore('drinks', {keyPath: 'id'});
storeOS.createIndex('name', 'name', { unique: false });
storeOS.createIndex('type', 'type', { unique: false });
storeOS.createIndex('subtype', 'subtype', { unique: false });
storeOS.createIndex('tags', 'tags', { unique: false });
}
else {
clearNeeded = true;
}
console.log('IDB Upgrade Needed: ' + upgradeNeeded);
console.log('IDB Clear Needed: ' + clearNeeded);
db = e.target.result;
if (clearNeeded == true) {
clearData();
}
else if (upgradeNeeded == true) {
for (var i = 0; i < recipeArray.length; i++) {
addItem(recipeArray[i]);
}
}
};
// Empties the 'drinks' store, then repopulates it from recipeArray.
function clearData() {
    var database = openRequest.result;
    var drinksStore = database
        .transaction(["drinks"], "readwrite")
        .objectStore("drinks");
    var clearRequest = drinksStore.clear();
    clearRequest.onerror = function (e) {
        console.log('Error clearing data. ', e.target.error.name);
        console.dir(e);
    };
    clearRequest.onsuccess = function (e) {
        console.log('Data cleared successfully.');
        // Reinsert every recipe now that the store is empty.
        recipeArray.forEach(function (recipe) {
            addItem(recipe);
        });
    };
}
// Inserts a single recipe record into the 'drinks' store.
function addItem(curItem) {
    var database = openRequest.result;
    var drinksStore = database
        .transaction(['drinks'], 'readwrite')
        .objectStore('drinks');
    var addRequest = drinksStore.add(curItem);
    addRequest.onerror = function (e) {
        console.log('Error', e.target.error.name);
        console.dir(e);
    };
    addRequest.onsuccess = function (e) {
        console.log('Item added: ' + curItem.name);
    };
}
}
The console returns the following:
I'm assuming the onUpgradeNeeded event is timing out before the JSON file is able to load.
Is there a way to delay the timeout? If not, does anyone know of a better way to accomplish what I'm trying to do?
The problem can be seen from what you see in the console. First we get IDB.js is running and then you go into your onupgradeneeded handler, but then, instead of having anything from that function console logged, we immediately see the onsuccess handler run. The cause of this is because you defined your onupgradeneeded handler to be async, which means that this function essentially stops execution at the await import("/resources/recipeDB.js"); line while it waits for the import to resolve. This essentially means as far as the IDB events are concerned that onupgradeneeded is done and it needs to go into the onsuccess. As Josh says above this is because onupgradeneeded needs to resolve synchronously.
What you can do to get around this, is:
Make onupgradeneeded synchronous
Update your IDB schema (create your new object store and indices)
Import the data and upon successful import insert them into your database
This is one of the difficult things about using IndexedDB: it's not promise-based, so using promise-based async functions doesn't always play well with it. I usually find these shortcomings require more code to handle them (such as using .then() calls so that I can have synchronous event handlers while still doing necessary asynchronous activities).
The onupgradeneeded event handler needs to complete synchronously. More accurately, requests upon/within the version change transaction that is running need to be started in the same tick of the event loop as when the version change transaction is started.
It is not clear from your question but it looks like you are making an async call to load the json and waiting for it to load, and this wait that happens is what allows the versionchange transaction to complete, and causes all requests made afterward to not occur in the same tick of the event loop.
Step 1: Package the JSON data in a JavaScript file.
Step 2: Use importScripts() in the upgrade event.
Step 3: Run your IndexedDB script inside a Worker.
I'm building a "storage provider" that allows consuming code to store stuff through an interface. Consider the below code snippets to be pseudocode as I'm going for MCVE. I'm trying to get my hands on IMPORTANTDATA and IMPORTANTKEY below.
At the lowest level, I have a baseService:
define([], function(){
// Base AJAX service shared by the concrete services below.
return function(){
this.sendRequest = function(data){
// NOTE(review): .done()/.fail() ignore their callbacks' return
// values, so the IMPORTANTDATA returned here never reaches callers —
// the promise still settles with the raw ajax response. Chaining a
// transformed value requires .then() instead.
return $.ajax(data).done(function(response){
return response.IMPORTANTDATA; // <---- This is needed
}).fail(function(response){
throw new Error(response);
});
}
}
})
I build services with this to reuse some base functionality, for example - eventService:
define(["baseService"], function(baseService){
const eventService = new baseService();
// Posts a media event; `someData` is assembled by the elided logic
// before delegating to the shared sendRequest helper.
eventService.postMediaEvent = function(eventType, mediaPath, storageProvider){
// isolated logic here
return eventService.sendRequest(someData);
}
// NOTE(review): nothing is returned from this module factory here —
// presumably trimmed for the example; verify against the real source.
})
This is where things start to get tricky: I have a baseStorageClient:
define(["eventService"], function (eventService) {
// Abstract storage client: concrete clients (e.g. indexedDbClient)
// override storeMedia; storeMetadata is shared by all of them.
return function(){
this.storageProvider = null;
const self = this;
this.storeMetadata = function(eventType, mediaPath){
return eventService.postMediaEvent(eventType, mediaPath, self.storageProvider);
};
// Must be overridden by a concrete client.
this.storeMedia = function(){
throw new Error("Not Implemented");
};
}
}
But this guy isn't ever used directly. I have instances of this created - for example, indexedDbClient:
define(["baseStorageClient"], function(baseStorageClient){
const indexedDbClient = new baseStorageClient();
// Overrides baseStorageClient.storeMedia with an IndexedDB-backed write.
indexedDbClient.storeMedia = function(blob){
// NOTE(review): the returned promise resolves as soon as the .add()
// request is issued — nothing awaits onsuccess/onerror, so the object
// returned inside the handler below is discarded and callers receive
// undefined. The request must be promisified for IMPORTANTKEY to
// propagate.
return openDatabase().then(function () {
const request = database.transaction(storeName, "readwrite")
.objectStore(storeName)
.add(dbEntry);
request.onsuccess = function (event) {
logger.log("combined segments saved into database.");
// todo - figure out how to resolve here
return {
IMPORTANTKEY: dbEntry.mediaId // <---- This too
}
};
request.onerror = function (event) {
// todo: figure out how to reject here
logger.log("Unable to save segments " + e);
};
});
}
})
And this client is used within my storageInterface:
define(["indexedDbClient"], function(indexedDbClient){
const storageInterface = {};
var currentClient = indexedDbClient; // might be other clients
// Facade: store the blob first, then persist its metadata.
storageInterface.storeMedia = function (blob) {
// NOTE(review): `eventType` is not defined in this scope, and
// storeMedia currently resolves with undefined (see the client above),
// so `mediaPath` is undefined here — both need to be threaded through.
return currentClient.storeMedia(blob).then(function(mediaPath) {
return currentClient.storeMetadata(eventType, mediaPath);
});
}
});
This is where things get super hairy. What I'm trying to achieve is the following:
// Desired usage: a single promise whose value carries both results
// gathered along the chain.
storageInterface.storeMedia(superBuffer).then(function (importantStuff) {
// this should go storeMedia > baseStorageClient > eventService
importantStuff.IMPORTANTKEY;
importantStuff.IMPORTANTDATA;
});
But I can't quite figure out how to get this handled. How can I compile a result along a chain of promises like this?
There are two major problems:
You should treat done and fail as deprecated. They don't allow for any chaining, they will discard the results of the callback. Always use then.
// Issues the AJAX call and resolves with only the IMPORTANTDATA field;
// a transport failure is rethrown as an Error carrying the response.
sendRequest = function (data) {
    var extractData = function (response) {
        return response.IMPORTANTDATA;
    };
    var wrapFailure = function (response) {
        throw new Error(response);
    };
    return $.ajax(data).then(extractData, wrapFailure);
}
Your transaction doesn't return any promise yet, so there's nothing for you to chain onto. You'll need to promisify it first:
// Adapts an IDBRequest-style object to a native Promise: the success
// event resolves the promise and the error event rejects it; in both
// cases the handler's event argument becomes the settled value.
function promiseFromRequest(req) {
    return new Promise(function (resolve, reject) {
        req.onsuccess = function (event) { resolve(event); };
        req.onerror = function (event) { reject(event); };
    });
}
Now you can actually use it like so:
// Persists the media entry and resolves with an object carrying the
// stored media id; a failed write is logged and re-thrown so the
// caller's chain rejects.
storeMedia = function (blob) {
    return openDatabase().then(function () {
        return promiseFromRequest(database.transaction(storeName, "readwrite")
            .objectStore(storeName)
            .add(dbEntry))
            .then(function (event) {
                logger.log("combined segments saved into database.");
                // Resolve the chain with the key the caller needs.
                return {
                    IMPORTANTKEY: dbEntry.mediaId
                };
            }, function (e) {
                logger.log("Unable to save segments " + e);
                // Re-throw so downstream handlers see the failure.
                throw e;
            });
        // Fix: the .then(...) call above was never closed — the original
        // snippet ended the rejection handler with `};` instead of `});`,
        // which is a syntax error.
    });
};
With those, you should be able to combine the results from storeMedia and storeMetaData in some way.
When I receive a message from the server via ditConsumer, based on a flag I invoke a function sendMessageToFile(), which works as expected. The problem is that when I navigate to another page and come back, socket.on listens twice for each incoming message and executes all variables and methods twice. If I go back and forth three times, it listens three times. I would appreciate help here; it looks like a socket.io connection problem across multiple tabs.
ctrll.js
// NOTE(review): this registration runs every time the controller is
// created (each page navigation) and the previous listener is never
// removed, so after N visits each server message is handled N times.
socket.on('ditConsumer',function (data) {
console.log('SEND MESSAGE FLAG',sendMessageFlag)
console.log('MESSAGE FROM SERVER',data);
// Pair the incoming message with the currently selected file.
var obj = {
file:$scope.filename,
data:data
}
$scope.event.push(data);
// socket.emit('messageToFile',obj);
if(sendMessageFlag === true) {
return sendMessageToFile(obj);
}
});
// Starts server-side recording for the given payload and flips the
// recording-related UI flags; a payload without a filename is ignored.
function sendMessageToFile(data) {
    if (!data.file) {
        return;
    }
    socket.emit('startrecording', data);
    $scope.disabledRecBtn = true;
    $scope.disabledStopBtn = false;
    $scope.showMessage = true;
}
socketFactory.js
angular.module('loggingApp').factory('socket', function($rootScope) {
'use strict';
var server = 'http://localhost:3000';
// One socket per factory instantiation; forceNew establishes a brand-new
// connection instead of reusing an existing one.
var socket = io.connect(server, {
'forceNew': true
});
return {
// NOTE(review): each call stacks another listener and the factory
// exposes no `off`/removeListener; because the callback is wrapped in
// an anonymous function, callers cannot deregister it themselves.
on: function(eventName, callback) {
socket.on(eventName, function() {
var args = arguments;
// Run the callback inside a digest so bindings update.
$rootScope.$apply(function() {
callback.apply(socket, args);
});
});
},
emit: function(eventName, data, callback) {
socket.emit(eventName, data, function() {
var args = arguments;
$rootScope.$apply(function() {
if (callback) {
callback.apply(socket, args);
}
});
})
}
};
});
serverIo.js
var sio = require('socket.io');
var ditconsumer = require('./consumers/ditconsumer');
var logsRecording = require('./records/logsRecording');
var io = null;
exports.io = function () {
return io;
};
exports.initialize = function(server) {
io = sio(server);
io.on('connection', function(socket) {
// Producer.startProducer();
ditconsumer.start(function(value){
io.emit('ditConsumer',value);
});
socket.on('createlogfile', function(params) {
logsRecording.userLogs(function(err,filename) {
if (err){
console.log(err);
} else {
socket.emit('filename', filename);
}
});
});
socket.on('startrecording', function(obj) {
logsRecording.recordLogs(obj);
});
socket.on('stopRecording',function (filename) {
console.log('stop recording data',filename);
logsRecording.deleteFile(filename);
});
});
};
One way would be to...
// add an event_id to the event message on server-side
io.sockets.emit('my_event', {msg:'hi', event_id: '1'});
// on client side keep track of handled events
var acknowledged = [];
// handle event
io.on('my_event', function (msg) {
// only continue if the event hasn't been handled before
if(!~acknowledged.indexOf(msg.event_id)){
// add to array of acknowledged events
acknowledged.unshift(msg.event_id);
// prevent array from growing to large
if(acknowledged.length > 20){
acknowledged.length = 20;
}
// handle once per event
}
});
You might also want to utilize socket.io rooms and create unique identifiers for every connection, or create something like a UUID and store it in the localStorage space of the browser; read or create it and supply it when connecting to io. Then when you send an event you could target specific rooms.
Or do something similar.....
I have this code:
html5DB.indexedDB.addSomething = function(foo) {
var db = html5DB.indexedDB.db;
var trans = db.transaction(["something"], "readwrite");
var store = trans.objectStore("something");
// NOTE(review): IsAlreadyExist returns immediately — its cursor work is
// asynchronous — so isExisting is always the synchronous `false` it
// returns at the end, never the cursor's result.
var isExisting = IsAlreadyExist(foo);
// How to wait for that instruction to finish?
if (!isExisting){
var request = store.put({
"text": foo,
"timeStamp" : new Date().getTime()
});
}
};
I'm trying to understand how I can wait for the function IsAlreadyExist to finish. This function opens a cursor to iterate over the object store to determine whether a particular value exists.
Actually, there is no error, but I can't reach the code inside the if (!isExisting) block because the value of the variable never changes.
This is the code of that second function:
function IsAlreadyExist(foo){
var db = html5DB.indexedDB.db;
var objectStore = db.transaction("something").objectStore("something");
objectStore.openCursor().onsuccess = function(event) {
var cursor = event.target.result;
if (cursor) {
if (cursor.value.text == foo)
// NOTE(review): this returns from the onsuccess handler only —
// the value is discarded and never reaches the caller.
return true;
cursor.continue();
}
};
// Runs before the cursor ever fires: the function always returns false.
return false;
}
Is there a way to wait for the end of the execution? Or maybe it is not a good way to proceed for checking if a value exist?
As IndexedDB is an asynchronous API you have to wait until operations are completed using callbacks.
First off we need to refactor the IsAlreadyExist method:
// Asynchronously checks whether a record whose `text` equals `foo`
// exists in the "something" store; the boolean result is reported via
// the `oncomplete` callback once the read transaction has finished.
function IsAlreadyExist(foo, oncomplete) {
    var db = html5DB.indexedDB.db;
    var transaction = db.transaction("something");
    var objectStore = transaction.objectStore("something");
    var exists = false;
    objectStore.openCursor().onsuccess = function (event) {
        var cursor = event.target.result;
        if (cursor) {
            if (cursor.value.text == foo) {
                exists = true;
                return; // stop iterating; the transaction will complete
            }
            cursor.continue();
        }
    };
    transaction.oncomplete = function () {
        // Fix: the parameter is named `oncomplete` — the original called
        // `oncompleted(...)`, a ReferenceError at runtime.
        oncomplete(exists);
    };
}
Now we'll refactor your other method:
// Inserts `foo` into the "something" store unless an entry with the
// same text already exists; existence is checked asynchronously first
// and the write happens inside the completion callback.
html5DB.indexedDB.addSomething = function (foo) {
    IsAlreadyExist(foo, function (alreadyThere) {
        if (alreadyThere) {
            return; // nothing to do — the value is already stored
        }
        var store = html5DB.indexedDB.db
            .transaction(["something"], "readwrite")
            .objectStore("something");
        store.put({
            "text": foo,
            "timeStamp": new Date().getTime()
        });
    });
};
So here you'll see we're passing the function to execute when the cursor search is done as a callback, which receives an argument of the status.
This can start getting a bit ugly, which is why Promises are popular in JavaScript. I wrote a wrapper for IndexedDB that uses a Promise-based API called db.js.