Chrome Extension with Database API interface - javascript

I want to update a div with a list of anchors that I generate from a local database in Chrome. It's pretty simple stuff, but as soon as I try to pass the data to the main.js file via a callback, everything suddenly becomes undefined, or the array length is set to 0 (when it's really 18).
Initially, I tried to load the rows into a new array and pass that back instead.
Is there a setting I need to specify in the Chrome manifest.json to allow communication with the database API? I've checked, but all I've been able to find is 'unlimitedStorage'.
The code is as follows:
window.main = {};
window.main.classes = {};
(function(awe){
    awe.Data = function(opts){
        opts = opts || {};
        return this.init(opts);
    };
    awe.Data.prototype = {
        init: function(opts){
            var self = this;
            self.modified = true;
            var db = self.db = openDatabase("buddy", "1.0", "LocalDatabase", 200000);
            db.transaction(function(tx){
                tx.executeSql("CREATE TABLE IF NOT EXISTS listing (name TEXT UNIQUE, url TEXT UNIQUE)", [], function(tx, rs){
                    $.each(window.rr, function(index, item){
                        var i = "INSERT INTO listing (name,url) VALUES('" + item.name + "','" + item.url + "')";
                        tx.executeSql(i, [], null, null);
                    });
                }, function(tx, error){
                });
            });
            self._load();
            return this;
        },
        add: function(item){
            var self = this;
            self.modified = true;
            self.db.transaction(function(tx){
                tx.executeSql("INSERT INTO listing (name,url) VALUES(?,?)", [item.name, item.url], function(tx, rs){
                    //console.log('success',tx,rs)
                }, function(tx, error){
                    //console.log('error',error)
                });
            });
            self._load();
        },
        remove: function(item){
            var self = this;
            self.modified = true;
            self.db.transaction(function(tx){
                tx.executeSql("DELETE FROM listing WHERE name='" + item.name + "'", [], function(tx, rs){
                    //console.log('success',tx,rs)
                }, function(tx, error){
                    //console.log('error',tx,error);
                });
            });
            self._load();
        },
        _load: function(callback){
            var self = this;
            if(!self.modified)
                return;
            self.data = [];
            self.db.transaction(function(tx){
                tx.executeSql('SELECT name,url FROM listing', [], function(tx, rs){
                    console.log(callback);
                    for(var i = 0; i < rs.rows.length; i++){
                        callback(rs.rows.item(i).name, rs.rows.item(i).url);
                        // var row = rs.rows.item(i)
                        // var n = new Object()
                        // n['name'] = row['name'];
                        // n['url'] = row['url'];
                    }
                }, function(tx, error){
                    //console.log('error',tx,error)
                });
            });
            self.modified = false;
        },
        all: function(cb){
            this._load(cb);
        },
        toString: function(){
            return 'main.Database';
        }
    };
})(window.main.classes);
And the code to update the list.
this.database.all(function(name, url){
    console.log('name', 'url');
    console.log(name, url);
    var data = [];
    $.each(data, function(index, item){
        try{
            var node = $('<div>' + item.name + '</div>');
            self.content.append(node);
            node.unbind();
            node.bind('click', function(evt){
                var t = $(evt.target).attr('href');
                chrome.tabs.create({
                    "url": t
                }, function(evt){
                    self._tab_index = evt.index;
                });
            });
        }catch(e){
            console.log(e);
        }
    });
});

From looking at your code above, I notice you are executing self._load() at the end of each function in your API. The HTML5 SQL database is asynchronous, so you can never guarantee the result; in this case I would assume the result will always be 0 or random because of the race condition.
I have done something similar in my fb-exporter extension; feel free to see how I have done it: https://github.com/mohamedmansour/fb-exporter/blob/master/js/database.js
To debug a problem like this, check the Web Inspector and see whether any errors occur in the background page (I assume this is all in a background page, eh?). If no errors occur, I believe you're encountering a race condition. Just move the load within the callback and it should properly call the load.
Regarding your first question about the unlimitedStorage manifest attribute: you don't need it for this case, so that shouldn't be the issue. The limit for Web databases is 5MB (last I recall; it might have changed); you only need that attribute if you're storing a lot of data.
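For reference, if you ever do exceed the quota, the permission is declared in manifest.json like this (a minimal sketch; the name and version fields are placeholders):
{
    "name": "Buddy",
    "version": "1.0",
    "manifest_version": 2,
    "permissions": ["unlimitedStorage"]
}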
Just make sure you can guarantee that this.database.all runs after the database has been initialized.
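To illustrate "move the load within the callback", here is a minimal sketch (names follow the question's code; the callback parameter is an assumption): init only triggers _load from inside the transaction's success callback, so the SELECT can never run before the table is created and seeded.
init: function(opts, callback){
    var self = this;
    self.db = openDatabase("buddy", "1.0", "LocalDatabase", 200000);
    self.db.transaction(function(tx){
        tx.executeSql("CREATE TABLE IF NOT EXISTS listing (name TEXT UNIQUE, url TEXT UNIQUE)", [], function(tx, rs){
            // the table now exists and is seeded, so reading is safe
            self._load(callback);
        });
    });
    return this;
}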

Related

Doesn't add records into PouchDB when used same function over again

I'm trying to create a database with "users" and their data in it. Strangely, it doesn't put() new variables in the document when I try for the third time. To do all this, I create a local database dblocal and replicate this DB to the remote db called dbremote. First I create a document with one variable.
function newuser() {
    if (window.document.consent_form.consent_to_share.value) {
        var id = "p" + Date.now() + "-" + Math.floor(Math.random() * 10000);
        var dblocal = new PouchDB(id);
        var consenttoshare = window.document.consent_form.consent_to_share.value;
        document.cookie = id;
        var dbremote = 'http://localhost:5984/experiment';
        dblocal.put({
            _id: id,
            consent: consenttoshare
        });
        dblocal.replicate.to(dbremote, {live: true});
    }
}
This all worked well. In another js file, I'm trying to add a variable to the same document by executing the following function putdb(). I'm doing this in the following way (which, according to their documentation, is the right way):
function putdb () {
    if (document.cookie){
        var id = document.cookie;
        var loggedin = "True";
        var dblocal = new PouchDB(id);
        dblocal.get(id).then(function (doc) {
            doc.loggedin = loggedin;
            return dblocal.put(doc);
        }).then(function () {
            return dblocal.get(id);
        }).then(function (doc) {
            console.log(doc);
            var dbremote = 'http://localhost:5984/experiment';
            dblocal.replicate.to(dbremote, {live: true});
        });
    }
}
This successfully added the variable loggedin to the document, as I wanted. However, upon trying to add information to this document a third time (again in another js file), nothing happens. I used exactly the same approach as before, only with different variables.
function putdb (checked) {
    if (document.cookie) {
        var id = document.cookie;
        var checkedlist = [];
        for (i = 0; i < checked; i++) {
            checkedlist.push($("input[type=checkbox]:checked")[i].value);
        }
        var playlistname = document.getElementById("playlistname").value;
        var dblocal = new PouchDB(id);
        dblocal.get(id).then(function (doc) {
            doc.checkedlist = checkedlist;
            doc.playlistname = playlistname;
            return dblocal.put(doc);
        }).then(function () {
            return dblocal.get(id);
        }).then(function (doc) {
            console.log(doc);
            var dbremote = 'http://localhost:5984/experiment';
            dblocal.replicate.to(dbremote, {live: true});
        });
    }
}
I checked all variables; they are correct.
I tried plain-text variables.
The script does run.
I tried to add information to the document the way I did the first time.
None of this seems to add another variable to the document, as I wanted in the last function. I think it has to do with the way PouchDB works, which I don't know. Help is much appreciated!
There are a number of problems in your code that result in bad usage of PouchDB and may lead to problems.
First of all, it does not make a lot of sense to give your document the same id as the name of your database. Assuming you want a one-database-per-user approach, there are two approaches you can follow.
Multiple document approach
You can instead make multiple documents within the same database with different id's. For instance, your 'consent' information may be stored like this:
var id = "p" + Date.now() + "-" + Math.floor(Math.random() * 10000);
let dblocal = new PouchDB(id);
document.cookie = id;
let dbremote = 'http://localhost:5984/experiment';
dblocal.put({
_id: "consent",
consent: window.document.consent_form.consent_to_share.value
});
dblocal.replicate.to(dbremote, {live: true});
While your playlist information is stored like this:
dblocal.put({
    _id: "playlist",
    name: playlistname,
    itemsChecked: checkedlist
});
Single-document approach
The second option is to store a single document containing all the information you want to store that is associated to a user. In this approach you will want to fetch the existing document and update it when there is new information. Assuming you named your document global-state (i.e. replace "consent" in the first code snippet with "global-state"), the following code will update a document:
dblocal.get("global-state").then((doc)=>{
doc.loggedIn = true; // or change any other information you want
return dblocal.put(doc);
}).then((response)=>{
//handle response
}).catch((err)=>{
console.log(err);
});
Furthermore, you should call dblocal.replicate.to(dbremote, {live: true}); only once, because the 'live' option specifies that future changes will automatically be replicated to the remote database.
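For instance, a minimal sketch (reusing the id and URL from the question; the retry option is an optional extra) that starts replication a single time and lets every later put() flow through on its own:
var dblocal = new PouchDB(id);
var dbremote = 'http://localhost:5984/experiment';
// start live replication exactly once, e.g. right after creating the database;
// every subsequent put() is then replicated automatically
dblocal.replicate.to(dbremote, {live: true, retry: true})
    .on('error', function (err) { console.log(err); });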

GlideAjax request doesn't return any response

I'm new to ServiceNow development.
I'm trying to create a Script Include / Client Script pair.
Using a background script, I can see that my Script Include works fine.
But when I try to call this include via the client script, it doesn't return any response.
Here is my method in Script Include:
usersCounter: function () {
    var gr = new GlideRecord('sys_user');
    gr.query();
    var users = gr.getRowCount();
    gs.info('Number of users' + ' ' + users);
    return users;
},
And here is my client script:
var ga = new GlideAjax('SCI_Training_ScriptIncludeOnChange');
ga.addParam('sysparm_name', 'usersCounter');
ga.getXML(getUsers);

function getUsers(response) {
    var numberOfUsers = response.responseXML.documentElement.getAttribute("answer");
    g_form.clearValue('description');
    console.log(numberOfUsers);
}
And I have null in my console.
What have I missed?
Irrespective of why it's not working, you probably want to change your server-side GlideRecord to use GlideAggregate instead, and just let the database return the row count:
var gr = new GlideAggregate('sys_user');
gr.addAggregate('COUNT');
gr.query();
gr.next();
var users = gr.getAggregate('COUNT');
gs.info('Number of users' + ' ' + users);
return users;
Doing a GlideRecord#query with no where clause is essentially doing a "SELECT * FROM sys_user", bringing over all the data, when all you're asking for is the row count from the metadata in the result set.
Beyond that, make sure your Script Include properly extends AbstractAjaxProcessor and has the client-callable field set to true per this:
https://docs.servicenow.com/bundle/geneva-servicenow-platform/page/script/server_scripting/reference/r_ExamplesOfAsynchronousGlideAjax.html
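As a rough sketch (assuming the include name from the question, and using the GlideAggregate query suggested above), a client-callable Script Include looks like this:
var SCI_Training_ScriptIncludeOnChange = Class.create();
SCI_Training_ScriptIncludeOnChange.prototype = Object.extendsObject(AbstractAjaxProcessor, {
    usersCounter: function () {
        var ga = new GlideAggregate('sys_user');
        ga.addAggregate('COUNT');
        ga.query();
        ga.next();
        // GlideAjax marshals this return value into the "answer" attribute of the response
        return ga.getAggregate('COUNT');
    },
    type: 'SCI_Training_ScriptIncludeOnChange'
});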
You can also debug your getUsers() method: check what the object structure of response actually is.
You could also use
var ga = new GlideAjax('SCI_Training_ScriptIncludeOnChange');
ga.addParam('sysparm_name', 'usersCounter');
ga.getXMLAnswer(getUsers);

function getUsers(response) {
    var numberOfUsers = response;
    g_form.clearValue('description');
    console.log(numberOfUsers);
}

How to get all the DB-IDs in Autodesk Forge

I need to get all the DB-Ids from an Autodesk Forge model. I referred to the code from https://forge.autodesk.com/cloud_and_mobile/2015/12/select-all-elements-in-the-viewer-with-view-and-data-api-with-javascript.html
I have also tried it in my own extension, and the code is as follows.
AutodeskNamespace("Autodesk.ADN.Viewing.Extension");

Autodesk.ADN.Viewing.Extension.Color = function(viewer, options) {
    Autodesk.Viewing.Extension.call(this, viewer, options);
    var _self = this;
    var _viewer = viewer;
    var instanceTree = viewer.model.getData().instanceTree;
    var rootId = this.rootId = instanceTree.getRootId();

    _self.load = function() {
        getAlldbIds(rootId);
    };

    function getAlldbIds(rootId) {
        var alldbId = [];
        if (!rootId) {
            return alldbId;
        }
        var queue = [];
        queue.push(rootId);
        while (queue.length > 0) {
            var node = queue.shift();
            alldbId.push(node);
            instanceTree.enumNodeChildren(node, function(childrenIds) {
                queue.push(childrenIds);
            });
        }
        console.log(alldbId);
    }
};
But I'm getting an error in the Developer Tools: cannot read property 'getData' of null. Where do you think I'm going wrong? Thanks in advance.
The problem must be that the model is not fully loaded, so you should wait for that event (Autodesk.Viewing.GEOMETRY_LOADED_EVENT). It might be better to wait for the object tree created event as well (Autodesk.Viewing.OBJECT_TREE_CREATED_EVENT) - see discussion here: How to Retrieve Forge Viewer objectTree?
By the way, there is an easier way now to get all the dbIds: Get all database id's in the model
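A minimal sketch of the event-based fix, reusing the names from the question's extension (the traversal function itself stays the same):
_self.load = function() {
    // defer the traversal until the instance tree actually exists
    _viewer.addEventListener(Autodesk.Viewing.OBJECT_TREE_CREATED_EVENT, function() {
        var instanceTree = _viewer.model.getData().instanceTree;
        getAlldbIds(instanceTree.getRootId());
    });
    return true; // an extension's load() is expected to return a boolean
};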

Show progress when getting list items from SharePoint List

Now I have a jQuery function for getting the list items from a SharePoint list:
function getListItems(listTitle, queryText){
    var ctx = SP.ClientContext.get_current();
    var splist = ctx.get_web().get_lists().getByTitle(listTitle);
    var camlQuery = new SP.CamlQuery();
    camlQuery.set_viewXml(queryText);
    var listItems = splist.getItems(camlQuery);
    ctx.load(listItems);
    var d = $.Deferred();
    ctx.executeQueryAsync(function() {
        var result = listItems.get_data().map(function(i){
            return i.get_fieldValues();
        });
        d.resolve(result);
    },
    function(sender, args){
        d.reject(args);
    });
    return d.promise();
}
And then I call this function
getListItems(listname, "").done(function(listItems){
    // do something here...
}).fail(function(error){
    console.log(error.get_message()); // Error message
});
But one of the lists contains quite a large number of records, and I want to show the progress to users so that they know what is going on. Is there a way to do this with just client-side scripting? Any help is appreciated. Thank you.
Using the provided example, you could display only an indeterminate progress bar, since the request is submitted to the server once and there is no way to determine the current completion status.
But since the SharePoint JSOM API supports paged data retrieval, you could consider the approach below, which lets you determine the current completion status and therefore display a determinate progress bar.
function getPagedListItems(listTitle, queryText, itemsCount, position){
    itemsCount = itemsCount || 100;
    var ctx = SP.ClientContext.get_current();
    var list = ctx.get_web().get_lists().getByTitle(listTitle);
    var camlQuery = new SP.CamlQuery();
    if(typeof position != 'undefined')
        camlQuery.set_listItemCollectionPosition(position);
    var viewXml = String.format("<View>{0}<RowLimit>{1}</RowLimit></View>", queryText, itemsCount);
    camlQuery.set_viewXml(viewXml);
    var listItems = list.getItems(camlQuery);
    ctx.load(list, 'ItemCount');
    ctx.load(listItems);
    var d = $.Deferred();
    ctx.executeQueryAsync(function() {
        d.resolve(listItems, list.get_itemCount());
    },
    function(sender, args){
        d.reject(args);
    });
    return d.promise();
}
function getListItems(listTitle, queryText, itemsCount, position, results){
    results = results || [];
    return getPagedListItems(listTitle, queryText, itemsCount, position)
        .then(function(pagedItems, totalItemCount){
            pagedItems.get_data().forEach(function(i){
                results.push(i.get_fieldValues());
            });
            var percentLoaded = results.length / totalItemCount * 100;
            console.log(String.format('{0}% has been loaded..', percentLoaded));
            var pos = pagedItems.get_listItemCollectionPosition();
            if(pos != null) {
                return getListItems(listTitle, queryText, itemsCount, pos, results);
            }
            return results;
        });
}
Usage
var listTitle = 'Contacts';
getListItems(listTitle, "", 20)
    .done(function(results){
        console.log('Completed');
    })
    .fail(function(error){
        console.log(error.get_message());
    });
Short Answer: You can't
Since your query is executed as a single request, there's no way to show "real" progress, although you could fake it by showing a generic "loading" gif.
Long Answer: You can if you really want to
If you were to modify your query to be paged (with a row limit per query), and then execute one request per page until all records are loaded, then you could update something on the page indicating progress.
// Use the RowLimit element to query for only 100 items at a time
camlQuery.set_viewXml("<View>"
    + "<OrderBy><FieldRef Name=\"Created\" /></OrderBy>"
    + "<RowLimit>100</RowLimit>"
    + "</View>");
Now inside the onSuccess function of executeQueryAsync(), you can access the listItemCollectionPosition property of your list item collection and pass that back into your CAML query to get the next page of items.
var itemsCount = listItems.get_count();
// use itemsCount to update the current progress as displayed to the user

// set the query's listItemCollectionPosition so you'll get the next page of results
camlQuery.set_listItemCollectionPosition(listItems.get_listItemCollectionPosition());

// reload the items with the updated query
listItems = splist.getItems(camlQuery);
ctx.load(listItems);
ctx.executeQueryAsync(... // rinse and repeat to get the next batch of items
Obviously, this approach would require you to restructure your code to allow an arbitrary number of function calls. You may want to split out your onSuccess function into a named function instead of an anonymous one, so you can execute it somewhat recursively.
When you restructure your code, I also recommend wrapping the entire code block up inside an immediately executing function expression so that your variables can be accessed as needed without polluting the global namespace.
(function(){
    // your code here
})();
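Putting those pieces together, a rough sketch of the recursive structure could look like this (splist, ctx, and camlQuery come from the question's code; updateProgress is a hypothetical stand-in for whatever UI update you choose):
(function(){
    var loadedCount = 0;
    function loadPage(camlQuery){
        var listItems = splist.getItems(camlQuery);
        ctx.load(listItems);
        ctx.executeQueryAsync(function(){
            loadedCount += listItems.get_count();
            updateProgress(loadedCount); // hypothetical helper that updates the progress UI
            var position = listItems.get_listItemCollectionPosition();
            if(position !== null){
                camlQuery.set_listItemCollectionPosition(position);
                loadPage(camlQuery); // recurse to fetch the next batch
            }
        }, function(sender, args){
            console.log(args.get_message());
        });
    }
    loadPage(camlQuery);
})();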

How do I update data in indexedDB?

I have tried to get some information from the W3C regarding updating an objectStore item in an IndexedDB database, but without much success.
I found a way to do it here, but it doesn't really work for me.
My implementation is something like this:
DBM.activitati.edit = function(id, obj, callback){
    var transaction = DBM.db.transaction(["activitati"], IDBTransaction.READ_WRITE);
    var objectStore = transaction.objectStore("activitati");
    var keyRange = IDBKeyRange.only(id);
    objCursor = objectStore.openCursor(keyRange);
    objCursor.onsuccess = function(e){
        var cursor = e.target.result;
        console.log(obj);
        var request = cursor.update(obj);
        request.onsuccess = function(){
            callback();
        }
        request.onerror = function(e){
            conosole.log("DBM.activitati.edit -> error " + e);
        }
    }
    objCursor.onerror = function(e){
        conosole.log("DBM.activitati.edit -> error " + e);
    }
}
I have all the DBM.activitati.(add | remove | getAll | getById | getByIndex) methods working, but I cannot resolve this one.
If you know how I can manage it, please do tell!
Thank you!
Check out this jsfiddle for some examples on how to update IDB records. I worked on that with another StackOverflower -- it's a pretty decent standalone example of IndexedDB that uses indexes and does updates.
The method you seem to be looking for is put, which will either insert or update a record if there are unique indexes. In that example fiddle, it's used like this:
phodaDB.indexedDB.addUser = function(userObject){
    //console.log('adding entry: ' + entryTxt);
    var db = phodaDB.indexedDB.db;
    var trans = db.transaction(["userData"], IDBTransaction.READ_WRITE);
    var store = trans.objectStore("userData");
    var request = store.put(userObject);
    request.onsuccess = function(e){
        phodaDB.indexedDB.getAllEntries();
    };
    request.onerror = function(e){
        console.log('Error adding: ' + e);
    };
};
For what it's worth, you've got some possible syntax errors, misspelling "console" in console.log as "conosole".
A bit late for an answer, but it may help others. I stumbled over the same problem, I guess, and it's very simple:
If you want to INSERT or UPDATE records, you use objectStore.put(object) (help)
If you only want to INSERT records, you use objectStore.add(object) (help)
So if you use add(object) and a record key already exists in the DB, it will not be overwritten, and the request fires error 0: "ConstraintError: Key already exists in the object store".
If you use put(object), the record will be overwritten.
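A minimal sketch of the difference, assuming an open db handle and an object store named "tab_user" with keyPath "idUser" (the store name matches the next answer's example):
var store = db.transaction(["tab_user"], "readwrite").objectStore("tab_user");
// put(): inserts, or silently overwrites the record whose key already exists
store.put({idUser: 1, nom: "updated name"});
// add(): inserts only; the request fires a ConstraintError if key 1 is already present
store.add({idUser: 1, nom: "duplicate"});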
This is a case of updating the info of a user object:
var transaction = db.transaction(["tab_user"], "readwrite");
var store = transaction.objectStore("tab_user");
var req = store.openCursor();
req.onerror = function(event) {
    console.log("case if have an error");
};
req.onsuccess = function(event) {
    var cursor = event.target.result;
    if(cursor){
        if(cursor.value.idUser == users.idUser){ // we found, by id, the user we want to update
            var user = {};
            user.idUser = users.idUser;
            user.nom = users.nom;
            var res = cursor.update(user);
            res.onsuccess = function(e){
                console.log("update success!!");
            }
            res.onerror = function(e){
                console.log("update failed!!");
            }
        }
        cursor.continue();
    }
    else{
        console.log("fin mise a jour"); // French: "update finished"
    }
}
I'm a couple of years late, but thought it'd be nice to add my two cents in.
First, check out BakedGoods if you don't want to deal with the complex IndexedDB API.
It's a library which establishes a uniform interface that can be used to conduct storage operations in all native, and some non-native, client storage facilities. It also maintains the flexibility and options afforded to the user by each. Oh, and it's maintained by yours truly :)
With it, placing one or more data items in an object store can be as simple as:
bakedGoods.set({
    data: [{key: "key1", value: "value1"}, {key: "key2", value: "value2"}],
    storageTypes: ["indexedDB"],
    complete: function(byStorageTypeResultDataObj, byStorageTypeErrorObj){}
});
Now to answer the actual question...
Let's begin by aggregating the valuable information spread across the existing answers:
IDBObjectStore.put() adds a new record to the store, or updates an existing one
IDBObjectStore.add() adds a new record to the store
IDBCursor.update() updates the record at the current position of the cursor
As one can see, OP is using an appropriate method to update a record. There are, however, several things in his/her code, unrelated to the method, that are incorrect (with respect to the API today at least). I've identified and corrected them below:
var cursorRequest = objectStore.openCursor(keyRange); // Correctly define result as request
cursorRequest.onsuccess = function(e){ // Correctly set onsuccess for request
    var objCursor = cursorRequest.result; // Get cursor from request
    var obj = objCursor.value; // Get value from existing cursor ref
    console.log(obj);
    var request = objCursor.update(obj);
    request.onsuccess = function(){
        callback();
    }
    request.onerror = function(e){
        console.log("DBM.activitati.edit -> error " + e); // Use "console" to log :)
    }
}
cursorRequest.onerror = function(e){ // Correctly set onerror for request
    console.log("DBM.activitati.edit -> error " + e); // Use "console" to log :)
}
