Show progress when getting list items from SharePoint List - javascript

I have a jQuery function for getting the list items from a SharePoint list:
function getListItems(listTitle, queryText){
var ctx = SP.ClientContext.get_current();
var splist = ctx.get_web().get_lists().getByTitle(listTitle);
var camlQuery = new SP.CamlQuery();
camlQuery.set_viewXml(queryText);
var listItems = splist.getItems(camlQuery);
ctx.load(listItems);
var d = $.Deferred();
ctx.executeQueryAsync(function() {
var result = listItems.get_data().map(function(i){
return i.get_fieldValues();
});
d.resolve(result);
},
function(sender,args){
d.reject(args);
});
return d.promise();
}
And then I call this function
getListItems(listname , "").done(function(listItems){
//do something here...
}).fail(function(error){console.log(error.get_message());}); // Error message
But one of the lists contains quite a large number of records, and I want to show progress to users so that they know what is going on. Is there a way to do this with just client-side scripting? Any help is appreciated. Thank you.

With the provided example you could only display an indeterminate progress bar, since the request is submitted to the server once and there is no way to determine how much of it has completed.
But since the SharePoint JSOM API supports paged data retrieval, you could consider the approach below, which lets you track how much has been loaded and therefore display a determinate progress bar.
function getPagedListItems(listTitle, queryText, itemsCount, position){
    itemsCount = itemsCount || 100;
    var ctx = SP.ClientContext.get_current();
    var list = ctx.get_web().get_lists().getByTitle(listTitle);
    var camlQuery = new SP.CamlQuery();
    if (typeof position != 'undefined')
        camlQuery.set_listItemCollectionPosition(position);
    var viewXml = String.format("<View>{0}<RowLimit>{1}</RowLimit></View>", queryText, itemsCount);
    camlQuery.set_viewXml(viewXml);
    var listItems = list.getItems(camlQuery);
    ctx.load(list, 'ItemCount');
    ctx.load(listItems);
    var d = $.Deferred();
    ctx.executeQueryAsync(function() {
        d.resolve(listItems, list.get_itemCount());
    },
    function(sender, args){
        d.reject(args);
    });
    return d.promise();
}
function getListItems(listTitle, queryText, itemsCount, position, results){
    results = results || [];
    return getPagedListItems(listTitle, queryText, itemsCount, position)
        .then(function(pagedItems, totalItemCount){
            pagedItems.get_data().forEach(function(i){
                results.push(i.get_fieldValues());
            });
            var percentLoaded = results.length / totalItemCount * 100;
            console.log(String.format('{0}% has been loaded..', percentLoaded));
            var pos = pagedItems.get_listItemCollectionPosition();
            if (pos != null) {
                return getListItems(listTitle, queryText, itemsCount, pos, results);
            }
            return results;
        });
}
Usage
var listTitle = 'Contacts';
getListItems(listTitle , "",20)
.done(function(results){
console.log('Completed');
})
.fail(function(error){
console.log(error.get_message());
});
Results: the console logs the cumulative percentage loaded after each page, followed by "Completed".

Short Answer: You can't
Since your query is executed as a single request, there's no way to show "real" progress, although you could fake it by showing a generic "loading" gif.
Long Answer: You can if you really want to
If you were to modify your query to be paged (with a row limit per query), and then execute one request per page until all records are loaded, then you could update something on the page indicating progress.
// Use the RowLimit element to query for only 100 items at a time
camlQuery.set_viewXml("<View>"
+ "<OrderBy><FieldRef Name=\"Created\" /></OrderBy>"
+ "<RowLimit>100</RowLimit>"
+ "</View>");
Now inside the onSuccess function of executeQueryAsync(), you can access the listItemCollectionPosition property of your list item collection and pass that back into your CAML query to get the next page of items.
var itemsCount = listItems.get_count();
// use itemCount to update the current progress as displayed to the user
camlQuery.set_listItemCollectionPosition(listItems.get_listItemCollectionPosition());
// set the query's listItemCollectionPosition so you'll get the next page of results
// reload the items with the updated query
listItems = splist.getItems(camlQuery);
ctx.load(listItems);
ctx.executeQueryAsync(... // rinse and repeat to get the next batch of items
Obviously, this approach would require you to restructure your code to allow an arbitrary number of function calls. You may want to split out your onSuccess function into a named function instead of an anonymous one, so you can execute it somewhat recursively.
When you restructure your code, I also recommend wrapping the entire code block up inside an immediately executing function expression so that your variables can be accessed as needed without polluting the global namespace.
(function(){
//your code here
})();
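To make the recursive restructuring concrete, here is a rough sketch of how a named, self-calling success handler might look. It reuses listname and splist from the question; updateProgress and onAllItemsLoaded are placeholder functions you would supply yourself, so treat this as an illustration rather than a drop-in replacement:
(function () {
    var ctx = SP.ClientContext.get_current();
    var splist = ctx.get_web().get_lists().getByTitle(listname);
    var allItems = [];

    function loadPage(position) {
        var camlQuery = new SP.CamlQuery();
        camlQuery.set_viewXml("<View><RowLimit>100</RowLimit></View>");
        camlQuery.set_listItemCollectionPosition(position);
        var listItems = splist.getItems(camlQuery);
        ctx.load(listItems);
        ctx.executeQueryAsync(function () {
            // collect this page's items
            listItems.get_data().forEach(function (item) {
                allItems.push(item.get_fieldValues());
            });
            updateProgress(allItems.length); // e.g. update a counter or progress bar
            var pos = listItems.get_listItemCollectionPosition();
            if (pos !== null) {
                loadPage(pos); // fetch the next page
            } else {
                onAllItemsLoaded(allItems); // done
            }
        }, function (sender, args) {
            console.log(args.get_message());
        });
    }

    loadPage(null); // start with the first page
})();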

Related

Why am I getting, "Uncaught TypeError: getEnumerator is not a function"?

In my Sharepoint 2010 Web Part, I've got this Javascript:
function getListItemID(username, payeename, oList) {
var arrayListEnum = oList.getEnumerator();
...which is called by this:
function upsertPostTravelListItemTravelerInfo1() {
var clientContext = SP.ClientContext.get_current();
var oList =
clientContext.get_web().get_lists().getByTitle('PostTravelFormFields');
this.website = clientContext.get_web();
currentUser = website.get_currentUser();
var itemCreateInfo = new SP.ListItemCreationInformation();
this.oListItem = oList.addItem(itemCreateInfo);
var travelersEmail = $('traveleremail').val();
/* If this is an update, the call to getListItemID() will return a val; otherwise (an insert), get from newly instantiated ListItem. */
listId = getListItemID(currentUser, travelersEmail, oList);
I got the basis for this code from here.
But I got the error listed above ("Uncaught TypeError: oList.getEnumerator is not a function").
One answer said I needed to add this:
<script type="text/javascript" src="/_layouts/15/sp.js" ></script>
...which I changed from "15" to "14" as that is the folder/version we're using.
That not only didn't work, but was unrecognized. I then found a clue here, namely to add this:
$(document).ready(function () { ExecuteOrDelayUntilScriptLoaded(CustomAction, "sp.js"); });
...but that only produced an error prior to the one already shown, namely "Uncaught ReferenceError: CustomAction is not defined".
So what's the scoop? What is required to call getEnumerator(), or otherwise retrieve the ID value I need?
Here is the full code to that method, to show what I'm trying to accomplish, and how:
function getListItemID(username, payeename, oList) {
var arrayListEnum = oList.getEnumerator();
while (arrayListEnum.moveNext()) {
var listItem = arrayListEnum.get_current();
if (listItem.get_item("ptli_formPreparedBy") === username &&
listItem.get_item("ptli_TravelersEmail") === payeename &&
listItem.get_item("ptli_formCompleted") == false) {
return listItem.get_id();
}
}
return '';
}
UPDATE
When I tried this (first and third lines are new):
<SharePoint:ScriptLinkID="ScriptLink1" Name="SP.js" runat="server" OnDemand="false" LoadAfterUI="true" Localizable="false"></SharePoint:ScriptLink>
<script type="text/javascript">
SP.SOD.executeFunc('sp.js', 'SP.ClientContext', sharePointReady);
...which was inspired by a cat here, I got, "System.Web.HttpParseException was unhandled by user code
Message=The server tag is not well formed."
Personally, I don't think Sharepoint is very well formed. But that's (right) beside the point (no pun intended).
Problem 1: You're calling getEnumerator on a list instead of a list item collection
getEnumerator() can only be called on a list item collection (not on a List object), and only after it's been populated with items by running clientContext.executeQueryAsync()
Problem 2: You need to call executeQueryAsync to populate the list item collection
When using the SharePoint JavaScript client object model, your code needs to be broken up into two parts: the first part specifies what you want to get, and involves you loading queries and commands into an SPClientContext object; the second part lets you manipulate the results of the query to SharePoint, and runs as an asynchronous callback of the query execution.
1. Create your context, specify which lists you want to access, etc.
2. Run clientContext.executeQueryAsync() (where clientContext is an SP.ClientContext object), and pass in delegate functions to run on success or failure.
3. In your "onSuccess" delegate function, work with the results of the commands you loaded up in step 1.
Problem 3: You won't be able to return values directly from an asynchronously executing function
Because step 3 above runs asynchronously, you can't get a return value from it. Any logic that depends on the results that you get in step 3 needs to be moved forward in the execution chain, using function delegation and callbacks.
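For example, rather than assigning the return value as in the question's listId = getListItemID(...), the dependent logic would be handed in as a callback. A minimal sketch (the extra callback parameter is an addition of mine, not part of the original function):
// instead of: listId = getListItemID(currentUser, travelersEmail, oList);
getListItemID(currentUser, travelersEmail, oList, function (listId) {
    // everything that needs listId lives here, because this code only
    // runs after executeQueryAsync has come back from the server
    console.log('Matching item ID: ' + listId);
});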
Problem 4: Inefficient filtering of list items
This is really more of a design flaw than a show-stopping problem, but instead of having your code return every item in the list, and then using JavaScript to enumerate through the results to see if the item you want is in there, you should tell SharePoint what filter options you want before it even executes the query. Then it'll only give you items that match your query.
Use a CAML query for this; CAML (Collaborative Application Markup Language) is an XML-based query language that SharePoint uses extensively. There are plenty of resources and tools for composing CAML queries, and you can even steal the CAML query from a SharePoint list view web part if you've already created a view that matches your query.
Example of how to query a SharePoint list using JavaScript CSOM
Here's an example using parts of your code:
/*
ExecuteOrDelayUntilScriptLoaded(yourcode,"sp.js") makes sure
your code doesn't run until SP.js (the SharePoint JavaScript CSOM)
has been loaded
*/
ExecuteOrDelayUntilScriptLoaded(function(){
var payeename = $('traveleremail').val();
var clientContext = SP.ClientContext.get_current();
var oList = clientContext.get_web().get_lists().getByTitle('PostTravelFormFields');
/* Use a CAML query to filter your results */
var camlQuery = new SP.CamlQuery();
camlQuery.set_viewXml('<View><Query><Where><Eq><FieldRef Name=\'ptli_TravelersEmail\' /><Value Type=\'Text\'>'+payeename+'</Value></Eq></Where></Query></View>');
/* get the list item collection from the list */
var oListItems = oList.getItems(camlQuery);
/* tell SharePoint to load the list items */
clientContext.load(oListItems);
/* execute the query to get the loaded items */
clientContext.executeQueryAsync(
/* onSuccess Function */
Function.createDelegate(this,function(){
/*
now that the query has run, you can get an enumerator
from your list item collection
*/
var arrayListEnum = oListItems.getEnumerator();
var ids = [];
while(arrayListEnum.moveNext()){
var listItem = arrayListEnum.get_current();
ids.push(listItem.get_id());
}
alert(ids.length > 0 ? "IDs of matching items: " + ids : "No matching items found!");
}),
/*onFailure Function*/
Function.createDelegate(this,function(sender,args){
alert("Whoops: " + args.get_message() + " " + args.get_stackTrace());
})
);
},"sp.js");
The CAML query in the example code only filters on the ptli_TravelersEmail column; you'd need to add some <And> elements to capture the other two filter conditions you want.
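As a sketch of what that nesting might look like for the three conditions in getListItemID (CAML's <And> takes exactly two children, so the conditions have to be nested; the Value types here are assumptions based on the field names):
camlQuery.set_viewXml('<View><Query><Where><And><And>'
    + '<Eq><FieldRef Name=\'ptli_formPreparedBy\' /><Value Type=\'Text\'>' + username + '</Value></Eq>'
    + '<Eq><FieldRef Name=\'ptli_TravelersEmail\' /><Value Type=\'Text\'>' + payeename + '</Value></Eq>'
    + '</And>'
    + '<Eq><FieldRef Name=\'ptli_formCompleted\' /><Value Type=\'Boolean\'>0</Value></Eq>'
    + '</And></Where></Query></View>');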
This is what finally worked for me, thanks to Thriggle:
function setListItemID(username, payeename) {
var clientContext = SP.ClientContext.get_current();
var oList = clientContext.get_web().get_lists().getByTitle('PostTravelFormFields');
/* Use a CAML query to filter your results */
var camlQuery = new SP.CamlQuery();
camlQuery.set_viewXml('<View><Query><Where><Eq><FieldRef Name=\'ptli_TravelersEmail\' /><Value Type=\'Text\'>' + payeename + '</Value></Eq></Where></Query></View>');
/* get the list item collection from the list */
var oListItems = oList.getItems(camlQuery);
/* tell SharePoint to load the list items */
clientContext.load(oListItems);
/* execute the query to get the loaded items */
clientContext.executeQueryAsync(
/* onSuccess Function */
Function.createDelegate(this, function () {
/*
now that the query has run, you can get an enumerator
from your list item collection
*/
var arrayListEnum = oListItems.getEnumerator();
var ids = [];
while (arrayListEnum.moveNext()) {
var listItem = arrayListEnum.get_current();
ids.push(listItem.get_id());
}
if (ids.length > 0) {
listId = ids[0];
}
else {
listId = '';
}
}),
/*onFailure Function*/
Function.createDelegate(this, function (sender, args) {
alert("Whoops: " + args.get_message() + " " + args.get_stackTrace());
})
);
}

Assemble paginated ajax data in a Bacon FRP stream

I'm learning FRP using Bacon.js, and would like to assemble data from a paginated API in a stream.
The module that uses the data has a consumption API like this:
// UI module, displays unicorns as they arrive
beautifulUnicorns.property.onValue(function(allUnicorns){
console.log("Got "+ allUnicorns.length +" Unicorns");
// ... some real display work
});
The module that assembles the data requests sequential pages from an API and pushes onto the stream every time it gets a new data set:
// beautifulUnicorns module
var curPage = 1
var stream = new Bacon.Bus()
var property = stream.toProperty()
property.onValue(function(){}) // You have to add an empty subscriber, otherwise future onValues will not receive the initial value. https://github.com/baconjs/bacon.js/wiki/FAQ#why-isnt-my-property-updated
var allUnicorns = [] // !!! stateful list of all unicorns ever received. Is this idiomatic for FRP?
var getNextPage = function(){
/* get data for subsequent pages.
Skipping for clarity */
}
var gotNextPage = function (resp) {
Array.prototype.push.apply(allUnicorns, resp) // just adds the responses to the existing array reference
stream.push(allUnicorns)
curPage++
if (curPage <= pageLimit) { getNextPage() }
}
How do I subscribe to the stream in a way that provides me a full list of all unicorns ever received? Is this flatMap or similar? I don't think I need a new stream out of it, but I don't know. I'm sorry, I'm new to the FRP way of thinking. To be clear, assembling the array works, it just feels like I'm not doing the idiomatic thing.
I'm not using jQuery or another ajax library for this, so that's why I'm not using Bacon.fromPromise
You also may wonder why my consuming module wants the whole set instead of just the incremental update. If it were just appending rows that could be ok, but in my case it's an infinite scroll and it should draw data if both: 1. data is available and 2. area is on screen.
This can be done with the .scan() method. You will also need a stream that emits the items of each page, which you can create with .repeat().
Here is draft code (sorry, not tested):
var itemsPerPage = Bacon.repeat(function(index) {
var pageNumber = index + 1;
if (pageNumber < PAGE_LIMIT) {
return Bacon.fromCallback(function(callback) {
// your method that talks to the server
getDataForAPage(pageNumber, callback);
});
} else {
return false;
}
});
var allItems = itemsPerPage.scan([], function(allItems, itemsFromAPage) {
return allItems.concat(itemsFromAPage);
});
// Here you go
allItems.onValue(function(allUnicorns){
console.log("Got "+ allUnicorns.length +" Unicorns");
// ... some real display work
});
As a bonus, you also won't need the .onValue(function(){}) hack or the external curPage state.
Here is a solution using flatMap and fold. When dealing with the network you have to remember that responses can come back in a different order than the requests were sent - that's why fold is combined with a map that sorts the results by page.
var pages = Bacon.fromArray([1,2,3,4,5])
var requests = pages.flatMap(function(page) {
return doAjax(page)
.map(function(value) {
return {
page: page,
value: value
}
})
}).log("Data received")
var allData = requests.fold([], function(arr, data) {
return arr.concat([data])
}).map(function(arr) {
// I would normally write this as a oneliner
var sorted = _.sortBy(arr, "page")
var onlyValues = _.pluck(sorted, "value")
var inOneArray = _.flatten(onlyValues)
return inOneArray
})
allData.log("All data")
function doAjax(page) {
// This would actually be Bacon.fromPromise($.ajax...)
// Math random to simulate the fact that requests can return out
// of order
return Bacon.later(Math.random() * 3000, [
"Page"+page+"Item1",
"Page"+page+"Item2"])
}
http://jsbin.com/damevu/4/edit

Problems making GET request from jQuery

I'm trying to make an HTTP GET request using the jQuery get() function, but I'm having some trouble.
Here's what my code looks like:
// get the links on the page
var pageLinks = $.find('#pageLinks');
// loop through each of the links
$(pageLinks).find('a').each(function(){
if($(this).attr('title') !== "Next Page"){
// make a GET request to the URL of this link
$.get($(this).attr("href"), function(data) {
console.log("here");
var temp = parse_page(data);
// concatenate the return string with another
bdy = bdy+String(temp);
console.log("done");
});
}
});
There are multiple pages that I need to get data from. Since the get() function is asynchronous, I get the pages in a random order. Secondly, the concatenation does not work. Even though I get each of the pages, they're not put into bdy.
Can anyone suggest how I might deal with this?
Thanks a lot!!
Construct bdy after all pages are retrieved, i.e. store get results in a dictionary or array; wait for all gets to finish; then assemble them in the correct order.
I tried this one and it works:
// get the links on the page
var pageLinks = $('a');
var bdy = "";
// loop through each of the links
$(pageLinks).each(function(){
console.log(this);
// make a GET request to the URL of this link
$.get($(this).attr("href"), function(data) {
// concatenate the return string with another
bdy = bdy + data.toString();
console.log(bdy);
});
});
As an example of what #muratgu has said:
var results = [];
var count = 0;
function allDone() {
var bdy = results.join("");
// do stuff with bdy
}
// get the links on the page
var pageLinks = $.find('#pageLinks');
// filter the links so we're left with the links we want
var wantedLinks = $(pageLinks).find('a').filter(function (idx) {
return $(this).attr('title') !== "Next Page";
});
// remember how many links we're working on
count = wantedLinks.length;
// loop through each of the links
wantedLinks.each(function (idx) {
// make a GET request to the URL of this link
$.get($(this).attr("href"), function (data) {
console.log("here");
var temp = parse_page(data);
results[idx] = temp;
// Decrement the count.
count--;
if (count === 0) {
// All done.
allDone();
}
});
});
You could go further and abstract this into a data type that can perform N async downloads, and then notify you when all are complete.
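As a rough sketch of that abstraction using jQuery's own $.when (it glosses over the single-request edge case, and reuses parse_page and wantedLinks from above):
function getAllPages(urls) {
    // start one GET per URL; $.get returns a promise-like jqXHR
    var requests = urls.map(function (url) {
        return $.get(url);
    });
    // resolve once every request has finished, keeping the original order
    return $.when.apply($, requests).then(function () {
        // with multiple requests, each argument is [data, statusText, jqXHR]
        return Array.prototype.map.call(arguments, function (args) {
            return parse_page(args[0]);
        });
    });
}

// usage
var urls = wantedLinks.map(function () { return $(this).attr('href'); }).get();
getAllPages(urls).done(function (pages) {
    var bdy = pages.join("");
    // do stuff with bdy
});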
I just found that there are modules that allow one to manage the control flow in JS. The ones I found are:
Async
Step
For help using the above modules, see my follow up question here.
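For instance, with the Async module the same fan-out could be written roughly like this (assuming an array of URLs gathered as in the answers above, and parse_page from the question; a sketch, not tested):
async.map(urls, function (url, callback) {
    $.get(url)
        .done(function (data) { callback(null, parse_page(data)); })
        .fail(function (xhr, status, err) { callback(err || status); });
}, function (err, pages) {
    if (err) { return console.log(err); }
    var bdy = pages.join(""); // results arrive in the same order as urls
    // do stuff with bdy
});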

Javascript: Set the order of functions

I'm writing a titanium app but I'm having an issue with the execution order of my javascript.
I have an event listener on a button. It's a reload button that clears a table, uses HTTPClient to GET a JSON array of 'appointments', saves each appointment, and refreshes a table list. The problem is that I execute the table delete first (which should clear the table) and then fetch the appointments, but when the app refreshes the data table it seems to do so too soon, before the new appointments have been saved, because I get an empty list. If I comment out the db.deleteAll line, each time I click reload the list is refreshed with the new (and existing) appointment data.
I need to make sure everything is done in order and only when the previous task is finished. So appointments.download() has to be executed AFTER db.deleteAll() and the list refresh has to be executed AFTER var allAppointments = db.all();
I think the problem is that the appointments.download() function has to make an HTTP GET call and then save the results, and the other functions are not waiting until it has finished.
Here is the code:
btnReload.addEventListener('click', function(e){
var affected = db.deleteAll();
appointments.download();
var allAppointments = db.all();
Ti.API.info(allAppointments);
appointmentList.setData(allAppointments);
});
Here are the functions that are being called:
db.deleteAll():
api.deleteAll = function(){
conn.execute('DELETE FROM appointments');
return conn.rowsAffected;
}
appointments.download():
var appointments = (function() {
var api = {};
api.download = function(){
var xhr = Titanium.Network.createHTTPClient();
xhr.onload = function()
{
var data = JSON.parse(this.responseText);
var dl = (data.length);
for(i=0; i<dl;i++)
{
//p = addRow(data,i); // returns the **arr array
//Ti.API.info('Saving : '+data[i].first_name);
var contact_name = data[i].first_name + ' ' + data[i].last_name;
var start_date = data[i].start_date;
var reference = data[i].reference;
var comment = data[i].comment;
var appointment_id = data[i].quote_id;
var lastid = db.create(appointment_id, start_date, reference, contact_name, comment);
//Ti.API.info(lastid);
}
};
xhr.open('GET','http://********.co.uk/appointments/download/');
xhr.send();
return;
}
Any help most appreciated!
Billy
Synchronous calls give you coordination (code won't execute until any computation it depends on finishes) for free. With asynchronous calls, you have to take care of coordination. This generally means passing the dependent code as a function to the asynchronous code. The passed code is known as a "continuation", which means "the rest of the calculation, from a given point forward". Passing continuations around is known as (unsurprisingly) "continuation passing style".
To rewrite code in CPS, identify the point(s) where you need to coordinate the code (the call to appointments.download), then wrap the rest of the code in a function.
btnReload.addEventListener('click', function(e){
var affected = db.deleteAll();
appointments.download();
function () {
var allAppointments = db.all();
Ti.API.info(allAppointments);
appointmentList.setData(allAppointments);
}
});
In the general case, the return value becomes the argument to the continuation. Here, no return value for appointments.download is used, so the continuation takes no arguments.
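As a tiny illustration of that general case (loadData and render are placeholder names, not part of the code above):
// synchronous style: the return value flows into the next statement
var data = loadData();
render(data);

// continuation-passing style: the "return value" becomes the continuation's argument
loadData(function (data) {
    render(data);
});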
Next, rewrite the asynchronous function to take the continuation and pass the continuation in the call.
btnReload.addEventListener('click', function(e){
var affected = db.deleteAll();
appointments.download(
function () {
var allAppointments = db.all();
Ti.API.info(allAppointments);
appointmentList.setData(allAppointments);
});
});
...
api.download = function(_return){
var xhr = Titanium.Network.createHTTPClient();
xhr.onload = function() {
var data = JSON.parse(this.responseText);
var dl = (data.length);
for (i=0; i<dl;i++) {
//p = addRow(data,i); // returns the **arr array
//Ti.API.info('Saving : '+data[i].first_name);
var contact_name = data[i].first_name + ' ' + data[i].last_name;
var start_date = data[i].start_date;
var reference = data[i].reference;
var comment = data[i].comment;
var appointment_id = data[i].quote_id;
var lastid = db.create(appointment_id, start_date, reference, contact_name, comment);
//Ti.API.info(lastid);
}
_return();
};
xhr.open('GET','http://********.co.uk/appointments/download/');
xhr.send();
return;
}
The continuation is named _return because the return statement can be modeled as a continuation (the default continuation). Calling _return in the asynchronous version has the same effect as calling return in the synchronous version.
Currently you are making requests asynchronously, which means you make a request and return from the function immediately; you don't wait for an answer. You could instead make your calls synchronous. I don't know what your conn and xhr really are, but they might provide ways to make the execute() and send() methods synchronous. For example, if you set the third argument of JavaScript's own XMLHttpRequest open() method to false, then send() will not return until a response is received from the server; your connection classes might have the same option.
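With the browser's plain XMLHttpRequest, a synchronous request looks roughly like this (the URL is a placeholder, and Titanium's HTTPClient may or may not offer an equivalent switch, so treat this only as an illustration of the idea):
var xhr = new XMLHttpRequest();
xhr.open('GET', 'http://example.com/appointments/download/', false); // false = synchronous
xhr.send();
// execution blocks here until the response has arrived
var data = JSON.parse(xhr.responseText);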
Move the call to delete the current appointments into the onload handler. That way you will delete the old and immediately add the new data.
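Sketched against the code in the question, that rearrangement might look like this (not tested; the surrounding module structure is omitted):
btnReload.addEventListener('click', function (e) {
    appointments.download(); // only kick off the download here
});

// inside api.download
xhr.onload = function () {
    db.deleteAll(); // clear the old rows only once the new data has arrived
    var data = JSON.parse(this.responseText);
    for (var i = 0; i < data.length; i++) {
        db.create(data[i].quote_id, data[i].start_date, data[i].reference,
                  data[i].first_name + ' ' + data[i].last_name, data[i].comment);
    }
    // the table is now up to date, so refresh the list
    appointmentList.setData(db.all());
};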

Chrome Extension with Database API interface

I want to update a div with a list of anchors that I generate from a local database in Chrome. It's pretty simple stuff, but as soon as I try to add the data to the main.js file via a callback, everything suddenly becomes undefined, or the array length is set to 0 (when it's really 18).
Initially, I tried to install it into a new array and pass it back that way.
Is there a setting that I need to specify in the chrome manifest.json in order to allow for communication with the database API? I've checked, but all I've been able to find was 'unlimited storage'
The code is as follows:
window.main = {};
window.main.classes = {};
(function(awe){
awe.Data = function(opts){
opts = opts || new Object();
return this.init(opts);
};
awe.Data.prototype = {
init:function(opts){
var self = this;
self.modified = true;
var db = self.db = openDatabase("buddy","1.0","LocalDatabase",200000);
db.transaction(function(tx){
tx.executeSql("CREATE TABLE IF NOT EXISTS listing ( name TEXT UNIQUE, url TEXT UNIQUE)",[],function(tx,rs){
$.each(window.rr,function(index,item){
var i = "INSERT INTO listing (name,url)VALUES('"+item.name+"','"+item.url+"')";
tx.executeSql(i,[],null,null);
});
},function(tx,error){
});
});
self._load()
return this;
},
add:function(item){
var self = this;
self.modified = true;
self.db.transaction(function(tx){
tx.executeSql("INSERT INTO listing (name,url)VALUES(?,?)",[item.name,item.url],function(tx,rs){
//console.log('success',tx,rs)
},function(tx,error){
//console.log('error',error)
})
});
self._load()
},
remove:function(item){
var self = this;
self.modified = true;
self.db.transaction(function(tx){
tx.executeSql("DELETE FROM listing where name='"+item.name+"'",[],function(tx,rs){
//console.log('success',tx,rs)
},function(tx,error){
//console.log('error',tx,error);
});
});
self._load()
},
_load:function(callback){
var self = this;
if(!self.modified)
return;
self.data = new Array();
self.db.transaction(function(tx){
tx.executeSql('SELECT name,url FROM listing',[],function(tx,rs){
console.log(callback)
for(var i = 0; i<rs.rows.length;i++)
{
callback(rs.rows.item(i).name,rs.rows.item(i).url)
// var row = rs.rows.item(i)
// var n = new Object()
// n['name'] = row['name'];
// n['url'] = row['url'];
}
},function(tx,error){
//console.log('error',tx,error)
})
})
self.modified = false
},
all:function(cb){
this._load(cb)
},
toString:function(){
return 'main.Database'
}
}
})(window.main.classes);
And the code to update the list.
this.database.all(function(name,url){
console.log('name','url')
console.log(name,url)
var data = []
$.each(data,function(index,item){
try{
var node = $('<div > '+item.name + '</div>');
self.content.append(node);
node.unbind();
node.bind('click',function(evt){
var t = $(evt.target).attr('href');
chrome.tabs.create({
"url":t
},function(evt){
self._tab_index = evt.index
});
});
}catch(e){
console.log(e)
}
})
});
From looking at your code above, I notice you are executing "self._load()" at the end of each function in your API. The HTML5 SQL database is asynchronous, so you can never guarantee when the result arrives. In this case, I would expect the result to always be 0 or random, because it is a race condition.
I have done something similar in my fb-exporter extension, feel free to see how I have done it https://github.com/mohamedmansour/fb-exporter/blob/master/js/database.js
To solve a problem like this, did you check the Web Inspector to see whether any errors occur in the background page? I assume this is all in a background page, eh? Try to see if any error occurs; if not, I believe you're encountering a race condition. Just move the load within the callback and it should properly call the load.
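Concretely, that means passing the callback through to _load from inside the success callback of the transaction, for example in the add method (a sketch; init and remove would need the same treatment):
add: function (item, callback) {
    var self = this;
    self.modified = true;
    self.db.transaction(function (tx) {
        tx.executeSql("INSERT INTO listing (name,url) VALUES (?,?)", [item.name, item.url],
            function (tx, rs) {
                // only reload once the INSERT has actually finished
                self._load(callback);
            },
            function (tx, error) {
                console.log('error', error);
            });
    });
},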
Regarding your first question about the unlimited storage manifest attribute: you don't need it for this case, so that shouldn't be the issue. The limit for web databases is 5MB (last I recall; it might have changed), and you only need that attribute if you're storing more data than that.
Just make sure you can guarantee the this.database.all is running after the database has been initialized.
