I used this post: IndexedDB: upgrade with promises?
And implemented the part here: https://stackoverflow.com/a/25565755/15778635
This works for what I need. The part I am having trouble with is this:
var newMigrationPromise = function (dbName, version, migration) {
return newPromise(function (deferred) {
var request = indexedDB.open(dbName, version);
// NB: caller must ensure upgrade callback always called
request.onupgradeneeded = function (event) {
var db = request.result;
newTransactionPromise(
function () {
// the versionchange transaction is available on the open request
var syncUPStore = request.transaction.objectStore("syncUP");
var syncCountRequest = syncUPStore.count();
// an IDBRequest fires onsuccess; there is no oncomplete on a request
syncCountRequest.onsuccess = function (event) {
if (syncCountRequest.result > 0)
deferred.reject(syncCountRequest.result + " SyncUp Records exist, database upgrade aborted, keeping at current version.");
else {
//Good, continue with the update
migration(db, request.transaction);
return request.transaction;
}
};
})
.then(function () { db.close(); })
.then(deferred.resolve, deferred.reject);
};
request.onerror = function (ev) { deferred.reject(request.error); };
});
};
I have a syncUP object store with data that needs to be sent to the server when the user goes online. In this particular case the service worker is installing (because the user came online and a change was put on the server) and needs to know whether syncUP records exist before allowing the service worker to update. If they do exist, the install needs to be aborted until the store is empty.
The service worker abort works fine, and aborting the database upgrade also works fine if I throw an error at the line var syncCountRequest = syncUPStore.count();.
My question:
How can I check whether there are records in the "syncUP" object store and still use the implementation mentioned above? I considered moving the logic into another method, but I ran into the same issue of not knowing how to handle the reject/resolve. My knowledge of promises is OK, but not yet good enough to figure this out on my own.
A rushed example:
var request = indexedDB.open(...);
request.onupgradeneeded = function(event) {
if(conditionShouldDoMigrationFromVersionXToNowIsTrue) {
// the versionchange transaction hangs off the open request (event.target)
migrate(event.target.transaction);
}
};
function migrate(versionChangeTransaction) {
var store = versionChangeTransaction.objectStore('x');
var request = store.getAll();
request.onsuccess = function(event) {
var objects = event.target.result;
for (var object of objects) {
// do some mutation to the object
object.x++;
// write it back
store.put(object);
}
};
}
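To tie that back to the question: a hedged sketch of how the syncUP count check could sit inside the same onupgradeneeded handler, using the versionchange transaction from the open request. The store name comes from the question, migrate is the function above, and dbName/version are as in the question's helper:

var request = indexedDB.open(dbName, version);
request.onupgradeneeded = function (event) {
    var tx = request.transaction; // the versionchange transaction
    var countRequest = tx.objectStore('syncUP').count();
    countRequest.onsuccess = function () {
        if (countRequest.result > 0) {
            // Pending sync records: abort the whole upgrade. This also
            // makes the open request fire onerror with an AbortError.
            tx.abort();
        } else {
            migrate(tx); // safe to run the migration on the same transaction
        }
    };
};
request.onerror = function () {
    // Lands here on a failed open or an aborted upgrade.
};

This works because a request placed during onupgradeneeded keeps the versionchange transaction active, so its onsuccess callback can still issue further requests (or abort) on the same transaction.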
Related
My website has a JSON file containing data that is intended to be transferred into my users' local IndexedDB. I'm looking for a way to only load this JSON file when an update is actually needed.
To clarify, I plan for my website to run almost entirely off of my users' locally stored data whenever possible, similar to an app. They should only have to download the JSON file when a new IDB update is available.
So far, I've tried accomplishing this by making the onupgradeneeded handler an async function.
if (!window.indexedDB) {
window.alert("Your browser doesn't support a stable version of IndexedDB, which is required for most functions of this website. For the best support, please use the latest version of Chrome, Firefox, or Safari.");
}
else {
var dbVer = 39; //IDB Version (int only)
var recipeObject; //instantiate global variable for module object import
var recipeArray = []; //instantiate global array for module import
var recipeDBver; //instantiate global variable for actual database version (TODO: implement version checking)
var upgradeNeeded = false;
var clearNeeded = false;
var openRequest = indexedDB.open('recipeDatabase', dbVer);
console.log("IDB.js running");
openRequest.onsuccess = function(e) {
console.log('Running onSuccess');
};
openRequest.onerror = function(e) {
console.log('Open Request ERROR');
console.dir(e);
};
openRequest.onupgradeneeded = async function(e) {
var db = e.target.result;
console.log('Running onUpgradeNeeded');
db.onerror = function(errorEvent) {
console.log("onUpgradeNeeded ERROR");
return;
};
var importObject = await import("/resources/recipeDB.js");
//TODO: remove debugging
console.log('Module loaded');
console.log(importObject);
recipeObject = importObject.default;
console.log(recipeObject);
recipeDBver = recipeObject.recipeDBver;
console.log(recipeDBver);
recipeArray = recipeObject.recipeArray;
console.log(recipeArray);
upgradeNeeded = true;
if (!db.objectStoreNames.contains('drinks')) {
var storeOS = db.createObjectStore('drinks', {keyPath: 'id'});
storeOS.createIndex('name', 'name', { unique: false });
storeOS.createIndex('type', 'type', { unique: false });
storeOS.createIndex('subtype', 'subtype', { unique: false });
storeOS.createIndex('tags', 'tags', { unique: false });
}
else {
clearNeeded = true;
}
console.log('IDB Upgrade Needed: ' + upgradeNeeded);
console.log('IDB Clear Needed: ' + clearNeeded);
db = e.target.result;
if (clearNeeded == true) {
clearData();
}
else if (upgradeNeeded == true) {
for (var i = 0; i < recipeArray.length; i++) {
addItem(recipeArray[i]);
}
}
};
function clearData() {
var db = openRequest.result;
var transaction = db.transaction(["drinks"], "readwrite");
var objectStore = transaction.objectStore("drinks");
var objectStoreRequest = objectStore.clear();
objectStoreRequest.onerror = function(e) {
console.log('Error clearing data. ', e.target.error.name);
console.dir(e);
};
objectStoreRequest.onsuccess = function(e) {
console.log('Data cleared successfully.')
for (var i = 0; i < recipeArray.length; i++) {
addItem(recipeArray[i]);
}
};
}
function addItem(curItem) {
var db = openRequest.result;
var transaction = db.transaction(['drinks'], 'readwrite');
var store = transaction.objectStore('drinks');
var item = curItem;
var request = store.add(item);
request.onerror = function(e) {
console.log('Error', e.target.error.name);
console.dir(e);
};
request.onsuccess = function(e) {
console.log('Item added: ' + curItem.name);
};
}
}
The console shows 'Running onSuccess' logged immediately after 'Running onUpgradeNeeded', before any of the module-loading output (screenshot omitted).
I'm assuming the onUpgradeNeeded event is timing out before the JSON file is able to load.
Is there a way to delay the timeout? If not, does anyone know of a better way to accomplish what I'm trying to do?
The problem can be seen in the console output. First we get IDB.js running, then execution enters your onupgradeneeded handler, but then, instead of anything from that function being logged, we immediately see the onsuccess handler run. This happens because you defined your onupgradeneeded handler as async, which means the function suspends at the await import("/resources/recipeDB.js"); line while it waits for the import to resolve. As far as the IDB events are concerned, onupgradeneeded is done, so it moves on to onsuccess. As Josh says above, this is because onupgradeneeded needs to complete synchronously.
What you can do to get around this is:
1. Make onupgradeneeded synchronous.
2. Update your IDB schema (create your new object store and indices).
3. Import the data and, upon successful import, insert it into your database.
This is one of the difficult things about using IndexedDB: it's not promise-based, so promise-based async functions don't always play well with it. I usually find these shortcomings require more code to handle (such as using .then() calls so that I can have synchronous event handlers while still doing the necessary asynchronous work); a sketch of this restructuring follows.
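As a rough illustration of those steps, a hedged sketch (variable, store, and index names borrowed from the question; the import path is the asker's own module). The schema work stays synchronous inside onupgradeneeded, and the import plus data load move to onsuccess:

var upgradeNeeded = false;
var openRequest = indexedDB.open('recipeDatabase', dbVer);

openRequest.onupgradeneeded = function (e) {
    var db = e.target.result;
    // Synchronous schema work only: create the store and indices here.
    if (!db.objectStoreNames.contains('drinks')) {
        var storeOS = db.createObjectStore('drinks', { keyPath: 'id' });
        storeOS.createIndex('name', 'name', { unique: false });
        storeOS.createIndex('type', 'type', { unique: false });
    }
    upgradeNeeded = true;
};

openRequest.onsuccess = function (e) {
    var db = e.target.result;
    if (!upgradeNeeded) return; // nothing to import on an ordinary open
    // The async import happens after the upgrade has committed.
    import('/resources/recipeDB.js').then(function (importObject) {
        var store = db.transaction('drinks', 'readwrite').objectStore('drinks');
        importObject.default.recipeArray.forEach(function (recipe) {
            store.put(recipe); // put() also overwrites stale records
        });
    });
};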
The onupgradeneeded event handler needs to complete synchronously. More accurately, requests upon/within the version change transaction that is running need to be started in the same tick of the event loop as when the version change transaction is started.
It is not clear from your question, but it looks like you are making an async call to load the JSON and waiting for it to load; that wait is what allows the versionchange transaction to complete, and it causes all requests made afterwards to fall outside the tick of the event loop in which the transaction started.
Step 1: Package the JSON data as a JavaScript file.
Step 2: Load it with importScripts() in the upgrade event.
Step 3: Run your IndexedDB script in a Worker.
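A hedged sketch of those three steps (the data file here is hypothetical and must be repackaged to assign a global, since importScripts() cannot load ES modules). The key point is that importScripts() is synchronous inside a worker, so the versionchange transaction is still active when it returns:

// worker.js -- started from the page via: new Worker('worker.js')
var openRequest = indexedDB.open('recipeDatabase', 39);
openRequest.onupgradeneeded = function (e) {
    // Synchronous load; assumes the file sets self.recipeArray = [...]
    importScripts('/resources/recipeData.js');
    var db = e.target.result;
    var store = db.createObjectStore('drinks', { keyPath: 'id' });
    self.recipeArray.forEach(function (recipe) {
        store.add(recipe); // still inside the versionchange transaction
    });
};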
Let's say that we have two buttons, each of which calls the following method:
var NUMBER_OF_IMAGE_REQUEST_RETRIES = 3;
var IMAGE_REQUEST_TIMEOUT = 3000;
processImage: function(image_data) {
var main_response = $q.defer();
var hash = getImageHash(image_data);
var requestsCounter = -1;
var requestImage = function() {
$http.post(apiUrl, {params: {data: hash},timeout: IMAGE_REQUEST_TIMEOUT})
.then(function(response) {
return main_response.resolve(response.data);
}, function(error) {
if (++requestsCounter < NUMBER_OF_IMAGE_REQUEST_RETRIES) {
requestImage();
} else {
return main_response.reject();
}
});
};
requestImage();
return main_response.promise;
}
The method passes image-related data to the server; the server processes the data and then responds. Every time a user presses a different button, different image_data is sent to the server.
The problem:
The user presses button 1 and the method is called with image_data_1; he/she then immediately presses button 2 and the method is called with image_data_2. The processImage function is called by another method, let's say doSomethingWithTheResponse, which only cares about the user's latest action. But image_data_2 is processed faster by the server, so the client receives the response for image_data_2 before the one for image_data_1, and therefore believes the image_data_1 response relates to the user's latest action, which is not the case. How can we ensure that the client always gets the response related to the user's latest action?
Note: The hash is different for the different image_data requests.
I was thinking something like:
var oldhash = null;
processImage: function(image_data) {
var main_response = $q.defer();
var hash = getImageHash(image_data);
oldhash = hash;
var requestsCounter = -1;
var requestImage = function(hash) {
if(hash === oldhash){
$http.post(apiUrl, {params: {data: hash},timeout: IMAGE_REQUEST_TIMEOUT})
.then(function(response) {
return main_response.resolve(response.data);
}, function(error) {
if (++requestsCounter < NUMBER_OF_IMAGE_REQUEST_RETRIES) {
requestImage(hash);
} else {
return main_response.reject();
}
});
}
else {
main_response.reject();
}
}
requestImage(hash);
return main_response.promise;
}
But I am not 100% sure that this is the right approach.
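For what it's worth, the oldhash idea can be tightened into a simple sequence counter, so each response checks whether it is still the latest call before resolving. A sketch using the question's $http/$q style (getImageHash and apiUrl are the asker's own; retries omitted for brevity):

var latestRequestId = 0;

processImage: function (image_data) {
    var requestId = ++latestRequestId; // tag this invocation
    var hash = getImageHash(image_data);
    return $http.post(apiUrl, { params: { data: hash }, timeout: IMAGE_REQUEST_TIMEOUT })
        .then(function (response) {
            if (requestId !== latestRequestId) {
                // A newer request was issued meanwhile; discard this result.
                return $q.reject('stale response');
            }
            return response.data;
        });
}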
Simply disregard the previous requests.
You can create a repository of requests (an array or dictionary implementation is fine). Call .abort() on the previous ones once another request is made -- that is, when you add the new one to your storage.
If you want a dictionary, there is a good example here (tackles a different topic, though), but here is a modified snippet of his code which is related to your case:
var _pendingRequests = {};
function abortPendingRequests(key) {
if (_pendingRequests[key]) {
_pendingRequests[key].abort();
}
}
Where the key can be.. say... a category of your action. You can name constants for it, or it can be just the name of the button pressed. It can even be a URL of your request; completely up to you.
There is an excellent explanation of the whole concept here:
jquery abort() ajax request before sending another
https://stackoverflow.com/a/3313022/594992
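As a hypothetical usage of that dictionary with jQuery (jqXHR objects expose .abort(), and .always() runs on success or failure; the endpoint is illustrative):

function requestImage(hash) {
    abortPendingRequests('image');          // cancel the stale request, if any
    _pendingRequests['image'] = $.ajax({
        url: apiUrl,
        method: 'POST',
        data: { data: hash }
    }).always(function () {
        delete _pendingRequests['image'];   // tidy up once settled
    });
    return _pendingRequests['image'];
}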
If your UI allows for initiating multiple actions while the processing of those actions is mutually exclusive, then you should probably use promises, and track active promises.
button1.addEventListener("click", function(evt) {
startRunning( task1.start() );
});
button2.addEventListener("click", function(evt) {
startRunning( task2.start() );
});
With a task runner like:
var runningTasks = [];
function startRunning( promise ) {
while(runningTasks.length>0) {
cancel( runningTasks.shift() ); // shift() removes the oldest entry
}
runningTasks.push( promise );
}
Your cancel function can come from anything that can deal with promises, like Angular's service.cancelRequest, or you can write your own code that takes the promise and smartly breaks off its operation.
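For Angular specifically, $http's documented cancellation hook is the timeout config option, which accepts a promise; resolving that promise aborts the underlying request. A minimal sketch (names illustrative):

var canceller = $q.defer();
var pending = $http.post(apiUrl, payload, { timeout: canceller.promise });

function cancel() {
    canceller.resolve('superseded'); // aborts the in-flight XHR
}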
Of course, if you're not using promises, then you probably want to start doing so, but if you absolutely can't, you can use a manager object like:
button1.addEventListener("click", function(evt) { task1(); });
button2.addEventListener("click", function(evt) { task2(); });
with
var manager = [];
function cancelAll() {
while(manager.length>0) {
var cancelfn = manager.shift();
cancelfn();
}
return true;
}
function task1() {
var running = cancelAll();
manager.push(function() { running = false; });
asyncDo(something1, function(result) {
if(!running) return;
// do your real thing
});
}
function task2() {
var running = cancelAll();
manager.push(function() { running = false; });
asyncDo(something2, function(result) {
if(!running) return;
// do your real thing
});
}
And you can put cancels on as many aspects as you need. If you need to cancel running XHRs, you might be able to do so; if you have multiple steps in your result handling, cut off at the start of each step, etc.
This sounds like an ideal use-case for promises. Basically, whenever a new request is made, you want to cancel any existing promises. I am not versed in AngularJS, but the following ng-specific links might prove useful:
Angularjs how to cancel resource promise when switching routes
Canceling A Promise In AngularJS
I want to get MX records for the hostname www.example.com. Node has a function for it:
dns.resolveMx(domain, callback)
But I don't want that callback thingy. I want something like a synchronous call, e.g.
var records = dns.resolveMx(domain);
Is this possible?
(Note: I found that function in the Node documentation: http://nodejs.org/api/dns.html)
Update 2021
There is now a promises submodule under the dns module if someone is looking for synchronous-style (awaitable) calls.
Check more here:
https://nodejs.org/api/dns.html#dns_class_dnspromises_resolver
const dnsPromises = require('dns').promises;
// inside an async function (or an ES module with top-level await):
const demo1 = await dnsPromises.resolveMx("gmail.com");
Is there any reason you want to block your application with a network operation? The DNS resolvers are called at the C level by the c-ares library, which is asynchronous by design. Therefore, you can't make it synchronous. This is the code from the DNS module with the unneeded parts removed:
var cares = process.binding('cares_wrap');
function resolver(bindingName) {
var binding = cares[bindingName];
return function query(name, callback) {
callback = makeAsync(callback);
var req = {
bindingName: bindingName,
callback: callback,
oncomplete: onresolve
};
var err = binding(req, name);
if (err) throw errnoException(err, bindingName);
callback.immediately = true;
return req;
}
}
var resolveMap = {};
exports.resolveMx = resolveMap.MX = resolver('queryMx');
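On Node versions that predate dns.promises, util.promisify (available since Node 8) wraps the callback API into the same awaitable shape:

const util = require('util');
const dns = require('dns');
const resolveMx = util.promisify(dns.resolveMx);

async function main() {
    const records = await resolveMx('gmail.com');
    console.log(records); // e.g. [{ exchange: '...', priority: 10 }, ...]
}
main();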
I am using jQuery promises to handle opening an IndexedDB database to store files and then reading and writing files to it (I am using these for the first time). As any of the functions can be called in any order, I always call the function to open the DB first before attempting the operation. The code is as below:
var DatabaseSingleton = (function () {
var openDbPromise = $.Deferred();
var openDb = function() {
var db;
var request = window.indexedDB.open("myDb", 2);
request.onerror = function(event) {
console.error('Error opening indexedDB connection.');
openDbPromise.reject();
}
request.onsuccess = function(event) {
console.log('db opened', request.result);
db = request.result;
db.onerror = function(event) {
console.error("Database error: " + event.target.error.name);
};
openDbPromise.resolve(db);
}
request.onupgradeneeded = function(event) {
console.log('upgrading the idb', event.target.result);
db = event.target.result;
// create a store for the files
db.createObjectStore("fileInfo", { keyPath: "name" }).createIndex("name", "name", { unique: false });
};
return openDbPromise.promise();
};
return {
// retrieve a list of all files in the DB
getFilesList: function() {
var filesPromise = $.Deferred();
openDb().then(function(db) {
...
});
return filesPromise.promise();
},
// retrieve metainfo of the file specified by its fileName
getFileinfo: function (fileName) {
var getInfoPromise = $.Deferred();
openDb().then(function(db) {
...
});
return getInfoPromise.promise();
}
};
})();
However, with this I notice 'db opened' being logged every time any of the functions is called. Is there a better way to make sure the database is only opened once, with the already-resolved promise reused for subsequent calls?
As it stands, var request = window.indexedDB.open("myDb", 2); etc is executed unconditionally every time openDb() is called.
The most straightforward approach is to introduce an if(...){} clause, to ensure that var request = window.indexedDB.open("myDb", 2); etc is only executed when a successful request (and hence db) has not been established (or is not in the process of being established).
Try this :
var DatabaseSingleton = (function () {
var openDbDeferred;
var openDb = function() {
if(!openDbDeferred || openDbDeferred.state() === "rejected") { // isRejected() was removed in jQuery 1.8
openDbDeferred = $.Deferred();
var db;
var request = window.indexedDB.open("myDb", 2);
request.onsuccess = function(event) {
console.log('db opened', request.result);
db = request.result;
db.onerror = function(event) {
console.error("Database error: " + event.target.error.name);
};
openDbDeferred.resolve(db);
};
request.onerror = function(event) {
console.error('Error opening indexedDB connection.');
openDbDeferred.reject();
};
request.onupgradeneeded = function(event) {
console.log('upgrading the idb', event.target.result);
db = event.target.result;
// create a store for the files
db.createObjectStore("fileInfo", { keyPath: "name" }).createIndex("name", "name", { unique: false });
};
}
return openDbDeferred.promise();
};
return {
//retrieve a list of all files in the DB
getFilesList: function() {
return openDb().then(function(db) {
...
});
},
//retrieve metainfo of the file specified by its fileName
getFileinfo: function(fileName) {
return openDb().then(function(db) {
...
});
}
};
}) ();
If you don't want openDb() to keep trying after a previous failure, then change :
if(!openDbDeferred || openDbDeferred.state() === "rejected") {
to :
if(!openDbDeferred) {
I've written a small jQuery plugin (still very alpha) that does this:
https://github.com/ameyms/jquery-indexeddb
And I've tried to keep the API super simple:
//Define and initialize an IndexedDB ...
var db = $.idb({
name:'foobar',
version: 2,
drop: stores_to_be_deleted,
stores:list_of_stores
});
// ... Add objects to a store
db.put(items, 'into_store').done(onsuccess);
//.. And delete objects from a store
db.remove('from_store', conditionFunc).done(onremoval);
//.. And not to forget fetching objects from a store
db.select('from_my_store', conditionFunc).done(function (items){
console.log(items)
});
Hope you like it!
The design is not robust in practice, because you cannot reliably use the database connection: it can be blocked at any time by another connection. If you never close the connection, you will occasionally find that a promise never resolves, due to a race condition among connections. On the other hand, if you close the connection, how will the consumer know the connection is closed?
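A sketch of the short-lived-connection alternative that comment argues for (names illustrative): the caller runs its transaction, and the connection closes when the transaction completes, so nothing lingers to block a future versionchange.

function withDb(work) {
    var request = indexedDB.open('myDb', 2);
    request.onsuccess = function () {
        var db = request.result;
        var tx = work(db);          // caller creates and uses a transaction
        tx.oncomplete = function () {
            db.close();             // release the connection promptly
        };
    };
}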
I have a recursive query like this (note: this is just an example):
var user = function(data)
{
this.minions = [];
this.loadMinions = function()
{
var _user = this;
database.query('select * from users where owner='+data.id,function(err,result,fields)
{
for(var m in result)
{
_user.minions[result[m].id] = new user(result[m]);
_user.minions[result[m].id].loadMinions();
}
});
console.log("loaded all minions");
}
}
currentUser = new user(ID);
for (var m in currentUser.minions)
{
console.log("minion found!");
}
This doesn't work because the timings are all wrong; the code doesn't wait for the query.
I've tried to do this:
var MyQuery = function(QueryString){
var Data;
var Done = false;
database.query(QueryString, function(err, result, fields) {
Data = result;
Done = true;
});
while(Done != true){};
return Data;
}
var user = function(data)
{
this.minions = [];
this.loadMinions = function()
{
var _user = this;
var result = MyQuery('select * from users where owner='+data.id);
for(var m in result)
{
_user.minions[result[m].id] = new user(result[m]);
_user.minions[result[m].id].loadMinions();
}
console.log("loaded all minions");
}
}
currentUser = new user(ID);
for (var m in currentUser.minions)
{
console.log("minion found!");
}
but it just freezes on the while loop. Am I missing something?
The first hurdle to solving your problem is understanding that I/O in Node.js is asynchronous. Once you know how this applies to your problem the recursive part will be much easier (especially if you use a flow control library like Async or Step).
Here is an example that does some of what you're trying to do (minus the recursion). Personally, I would avoid recursively loading a possibly unknown number/depth of records like that; instead, load them on demand, as in this example:
var User = function(data) {
this.data = data;
this.minions = null; // cache, populated on the first getMinions() call
};
User.prototype.getMinions = function(primaryCallback) {
var that = this; // scope handle
if(this.minions) { // bypass the db query if results cached
return primaryCallback(null, this.minions);
}
// Callback invoked by database.query when it has the records
var aCallback = function(error, results, fields) {
if(error) {
return primaryCallback(error);
}
// This is where you would put your recursive minion initialization
// The problem you are going to have is callback counting, using a library
// like async or step would make this party much much easier
that.minions = results; // bypass the db query after this
primaryCallback(null, results);
}
database.query('SELECT * FROM users WHERE owner = ' + data.id, aCallback);
};
var user = new User(someData);
user.getMinions(function(error, minions) {
if(error) {
throw error;
}
// Inside the function invoked by primaryCallback(...)
minions.forEach(function(minion) {
console.log('found this minion:', minion);
});
});
The biggest thing to note in this example are the callbacks. The database.query(...) is asynchronous and you don't want to tie up the event loop waiting for it to finish. This is solved by providing a callback, aCallback, to the query, which is executed when the results are ready. Once that callback fires and after you perform whatever processing you want to do on the records you can fire the primaryCallback with the final results.
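If you do need the full recursive load, the callback counting the answer mentions is exactly what a flow-control library handles for you. A hedged sketch with the async library (async.each calls its final callback once every iterator has finished; names illustrative):

var async = require('async');

function loadMinionsRecursively(user, done) {
    user.getMinions(function (error, minions) {
        if (error) return done(error);
        async.each(minions, function (minionData, next) {
            // Recurse into each minion; next() signals this branch is done.
            loadMinionsRecursively(new User(minionData), next);
        }, done);
    });
}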
Each Node.js process is single-threaded, so the line
while(Done != true){};
takes over the thread, and the callback that would have set Done to true never gets run, because the thread is blocked in an infinite loop.
You need to refactor your program so that code that depends on the results of the query is included within the callback itself. For example, make MyQuery take a callback argument:
MyQuery = function(QueryString, callback){
Then call the callback at the end of your database.query callback -- or even supply it as the database.query callback.
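A minimal sketch of that refactor (the body simply forwards the caller's callback straight through to database.query):

var MyQuery = function (QueryString, callback) {
    database.query(QueryString, callback);
};

MyQuery('select * from users where owner=' + data.id, function (err, result) {
    if (err) throw err;
    // code that depends on the query results goes here
});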
The freezing is unfortunately correct behaviour, as Node is single-threaded.
You need a scheduler package to fix this. Personally, I have been using Fibers-promise for this kind of issue. You might want to look at that or another promise library, or at async.
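Alternatively, the same refactor can be expressed with a plain promise wrapper instead of fibers (a sketch; database.query keeps its usual (err, result) callback):

function myQuery(queryString) {
    return new Promise(function (resolve, reject) {
        database.query(queryString, function (err, result) {
            if (err) reject(err);
            else resolve(result);
        });
    });
}

myQuery('select * from users where owner=' + id)
    .then(function (rows) {
        // rows are available here, without blocking the event loop
    });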