My idea is to download a huge file from the server in chunks:
1. Store the chunks in IndexedDB.
2. After all chunks are downloaded, merge the records into one (like SQL's SELECT * FROM XXX ...).
3. Save the result to disk with saveAs(), or create a URL to the IndexedDB data.
(Or is there a better approach?)
I do not know how to do steps 2 and 3.
(Below is an example that stores 10,000 records in the DB; the expected output after step 2 should be SusanSusanSusanSusanSusan...)
if (transaction) {
    transaction.oncomplete = function () {
    };
    transaction.onabort = function () {
        writeToConsoleScreen("transaction aborted.");
        localDatabase.db.close();
    };
    transaction.ontimeout = function () {
        writeToConsoleScreen("transaction timeout.");
        localDatabase.db.close();
    };
    var store = transaction.objectStore(osTableName);
    if (store) {
        var req;
        var customer = {};
        // create ten thousand records
        for (var loop = 0; loop < 10000; loop++) {
            customer = {};
            customer.fname = 'Susan';
            req = store.add(customer);
            req.onsuccess = function (ev) {
            };
            req.onerror = function (ev) {
                writeToConsoleScreen("Failed to add record." + " Error: " + ev.message);
            };
        }
    }
}
<!DOCTYPE html>
<script>
var open = indexedDB.open('chunks-example');
open.onupgradeneeded = function () {
    // Create schema if necessary
    var db = open.result;
    db.createObjectStore('chunks');
};

// 1. Chunks stored into an IndexedDB.
open.onsuccess = function () {
    var db = open.result;
    var tx = db.transaction('chunks', 'readwrite');
    var store = tx.objectStore('chunks');
    for (var i = 0; i < 10; ++i) {
        // In real life, this data would arrive asynchronously
        // via XMLHttpRequest.response.
        var chunk = new Blob(['chunk ' + i + '\n'],
                             {type: 'application/octet-stream'});
        store.put(chunk, i);
    }
    tx.oncomplete = function () { merge(db); };
};

// 2. After "downloading" all chunks, merge all records into one.
function merge(db) {
    var tx = db.transaction('chunks');
    var store = tx.objectStore('chunks');
    var chunks = [];
    var request = store.openCursor();
    request.onsuccess = function () {
        var cursor = request.result;
        if (cursor) {
            chunks.push(cursor.value);
            cursor.continue();
        } else {
            saveAs('myfile', new Blob(chunks,
                                      {type: 'application/octet-stream'}));
        }
    };
}

// 3. Save to disk via saveAs().
function saveAs(filename, blob) {
    var a = document.documentElement.appendChild(document.createElement('a'));
    a.href = URL.createObjectURL(blob);
    a.download = filename;
    a.click();
    a.remove();
}
</script>
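For step 1 against a real server, here is a minimal sketch of downloading byte ranges with fetch(), assuming the server honors HTTP Range requests; the URL and CHUNK_SIZE are placeholders, not part of the answer above:

// A sketch only: assumes the server supports Range requests.
var CHUNK_SIZE = 1024 * 1024; // 1 MiB per chunk (placeholder)

function downloadChunk(url, index) {
    var start = index * CHUNK_SIZE;
    var end = start + CHUNK_SIZE - 1;
    return fetch(url, {headers: {Range: 'bytes=' + start + '-' + end}})
        .then(function (response) { return response.blob(); });
}

// Store each downloaded chunk under its index, as store.put(chunk, i) does above.
function downloadAndStore(db, url, index) {
    return downloadChunk(url, index).then(function (blob) {
        return new Promise(function (resolve, reject) {
            var tx = db.transaction('chunks', 'readwrite');
            tx.objectStore('chunks').put(blob, index);
            tx.oncomplete = resolve;
            tx.onerror = reject;
        });
    });
}

Note that new Blob(chunks, ...) in merge() does not necessarily copy the chunk data into memory, so assembling a large file this way is usually safe, though that is browser-dependent.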
Here is my code. I want to return getConn.result and use it in my JS page. Could you help me?
function readObjectStore(storeName) {
    // Open (or create) the database
    var open = indexedDB.open("Publisher", 1);
    open.onupgradeneeded = function () {};
    open.onsuccess = function () {
        var db = open.result;
        var tx = db.transaction(storeName, "readwrite");
        var store = tx.objectStore(storeName);
        if (storeName == "connectionStrings") {
            var getConn = store.getAll();
            getConn.onsuccess = function () {
                return getConn.result;
            };
        }
    };
}
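That return inside onsuccess goes back to IndexedDB's internals, not to your caller: getAll() is asynchronous, so readObjectStore has already returned by the time the result arrives. A minimal sketch of a Promise-based rewrite (same database and store names as above; the usage line is an assumption about how you call it):

function readObjectStore(storeName) {
    return new Promise(function (resolve, reject) {
        var open = indexedDB.open("Publisher", 1);
        open.onsuccess = function () {
            var db = open.result;
            var tx = db.transaction(storeName, "readonly");
            var getConn = tx.objectStore(storeName).getAll();
            getConn.onsuccess = function () { resolve(getConn.result); };
            getConn.onerror = function () { reject(getConn.error); };
        };
        open.onerror = function () { reject(open.error); };
    });
}

// Usage:
readObjectStore("connectionStrings").then(function (rows) {
    console.log(rows); // the array formerly in getConn.result
});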
Can anyone assist me with loading an array with Excel data and returning it from a function? This is my initial code:
var excel = require('exceljs');
var wb = new excel.Workbook();
var path = require('path');
var filePath = path.resolve(__dirname, 'data.xlsx');

function signIn() {
    var SignIn = [];
    wb.xlsx.readFile(filePath).then(function () {
        var sh = wb.getWorksheet("Sheet1");
        for (var i = 1; i < 3; i++) {
            SignIn.push(sh.getRow(i).getCell(2).value);
        }
    });
    return SignIn;
}
Workbook.readFile is asynchronous, so you need to use either a callback or a promise-based approach. Using promises, we can try:
var excel = require('exceljs');
var wb = new excel.Workbook();
var path = require('path');
var filePath = path.resolve(__dirname, 'data.xlsx');

function signIn() {
    var SignIn = [];
    return wb.xlsx.readFile(filePath).then(() => {
        var sh = wb.getWorksheet("Sheet1");
        for (var i = 1; i < 3; i++) {
            SignIn.push(sh.getRow(i).getCell(2).value);
        }
        return SignIn;
    });
}

async function testReadData() {
    try {
        let data = await signIn();
        console.log('testReadData: Loaded data: ', data);
    } catch (error) {
        console.error('testReadData: Error occurred: ', error);
    }
}

testReadData();
Or you can use a callback-style approach:
function signInWithCallback(callback) {
    var SignIn = [];
    wb.xlsx.readFile(filePath).then(function () {
        var sh = wb.getWorksheet("Sheet1");
        for (var i = 1; i < 3; i++) {
            SignIn.push(sh.getRow(i).getCell(2).value);
        }
        callback(SignIn);
    });
}

signInWithCallback((data) => console.log('Callback: Data: ', data));
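Since wb.xlsx.readFile already returns a promise, an async/await variant is also possible; this is just a sketch of the same read, with the same hard-coded sheet name and cell positions:

// Sketch: same logic as signIn(), written with async/await.
async function signInAsync() {
    await wb.xlsx.readFile(filePath);
    var sh = wb.getWorksheet("Sheet1");
    var SignIn = [];
    for (var i = 1; i < 3; i++) {
        SignIn.push(sh.getRow(i).getCell(2).value);
    }
    return SignIn;
}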
Alright, here's the plan: go through each file and add it to an array. Once all files are added, combine them using JSZipUtils and Docxtemplater:
'click .merge-icon': (e) => {
    var programId = Router.current().url.split('/').pop();
    var programObj = Programs.findOne(programId);
    var insertedDocuments = [];
    var i = 0;
    var count = programObj.activityIds.count;

    var fileDownloadPromise = new Promise((resolve, reject) => {
        programObj.activityIds.forEach(function (activityId) {
            var activityObj = Activities.findOne(activityId);
            var documentObj = ActivityFiles.findOne(activityObj.documents.pop()._id);
            JSZipUtils.getBinaryContent(documentObj.url(), callback);

            function callback(error, content) {
                var zip = new JSZip(content);
                var doc = new Docxtemplater().loadZip(zip);
                var xml = zip.files[doc.fileTypeConfig.textPath].asText();
                xml = xml.substring(xml.indexOf("<w:body>") + 8);
                xml = xml.substring(0, xml.indexOf("</w:body>"));
                xml = xml.substring(0, xml.indexOf("<w:sectPr"));
                insertedDocuments.push(xml);
                i++;
                if (i == count - 1) {
                    resolve();
                }
            }
        });
    });

    fileDownloadPromise.then(() => {
        JSZipUtils.getBinaryContent('/assets/template.docx', callback);

        function callback(error, content) {
            console.log(content);
            var zip = new JSZip(content);
            var doc = new Docxtemplater().loadZip(zip);
            setData(doc);
        }

        function setData(doc) {
            doc.setData({
                body: insertedDocuments.join('<w:br/><w:br/>')
            });
            doc.render();
            useResult(doc);
        }

        function useResult(doc) {
            var out = doc.getZip().generate({
                type: 'blob',
                mimeType: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
            });
            saveAs(out, programObj.name + '.docx');
        }
    });
}
It turns out nothing is happening. What's wrong with the execution of this Promise?
I'm only calling resolve once every file has been loaded into the array.
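One likely culprit, offered as a guess: a plain array has a length property, not count, so count is undefined and i == count - 1 is never true, which means resolve() never fires (and even with length, the check is off by one, since i is incremented before the comparison). A hedged sketch that sidesteps the counting entirely by wrapping each download in its own Promise and waiting with Promise.all (assuming the same Meteor collections and the same JSZip/Docxtemplater versions as the code above):

// Sketch: one Promise per file; Promise.all resolves after the last one.
function fetchBodyXml(activityId) {
    return new Promise(function (resolve, reject) {
        var activityObj = Activities.findOne(activityId);
        var documentObj = ActivityFiles.findOne(activityObj.documents.pop()._id);
        JSZipUtils.getBinaryContent(documentObj.url(), function (error, content) {
            if (error) return reject(error);
            var zip = new JSZip(content);
            var doc = new Docxtemplater().loadZip(zip);
            var xml = zip.files[doc.fileTypeConfig.textPath].asText();
            xml = xml.substring(xml.indexOf("<w:body>") + 8);
            xml = xml.substring(0, xml.indexOf("</w:body>"));
            xml = xml.substring(0, xml.indexOf("<w:sectPr"));
            resolve(xml);
        });
    });
}

Promise.all(programObj.activityIds.map(fetchBodyXml))
    .then(function (xmlBodies) {
        // xmlBodies is in the original order; continue with the template step.
    });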
I have code to create an IndexedDB database here:
function create_db() {
    var indexedDB = window.indexedDB || window.webkitIndexedDB || window.msIndexedDB;
    var request = indexedDB.open("photos", 2);
    request.onupgradeneeded = function (event) {
        var db = event.target.result;
        // Create photo store
        var photo_store = db.createObjectStore("photos", {keyPath: "photo_id"});
        var photo_id_index = photo_store.createIndex("by_photo_id", "photo_id", {unique: true});
        var dest_id_index = photo_store.createIndex("by_destination_id", "destination_id");
        console.log("store created");
    };
    request.onsuccess = function (event) {
        console.log("store opened");
    };
    request.onerror = function (event) {
        console.log("error: " + event);
    };
}
My code to remove entries:
var remove_photos = function (destination_id, db) {
    var transaction = db.transaction("photos", "readwrite");
    var store = transaction.objectStore("photos");
    var index = store.index("by_destination_id");
    var request = index.openCursor(IDBKeyRange.only(destination_id));
    request.onsuccess = function () {
        var cursor = request.result;
        if (cursor) {
            cursor.delete();
            cursor.continue();
        }
    };
};
How can I delete records using the by_destination_id index so that I can delete all records with a given destination_id, which is an integer?
Thanks for any help.
I found the solution to my issue: IDBKeyRange.only didn't like the integer; it needed a string. Substituting this line in:
var request = index.openCursor(IDBKeyRange.only(destination_id.toString()));
makes the code work.
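This suggests the destination_id keys were stored as strings: IndexedDB compares keys by type, so an integer range never matches string keys. Storing the IDs consistently as numbers would also work. As an alternative to iterating a cursor, here is a hedged sketch that collects the matching primary keys via the index and deletes them (same store and index names as above; IDBIndex.getAllKeys requires a reasonably recent browser):

// Sketch: delete all records for a destination without a cursor.
var keysRequest = index.getAllKeys(IDBKeyRange.only(destination_id.toString()));
keysRequest.onsuccess = function () {
    keysRequest.result.forEach(function (photoId) {
        store.delete(photoId); // delete by primary key (photo_id)
    });
};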
I'm doing a few tests with IndexedDB, and Chrome keeps getting blocked during an insert test.
Basically, I run a simple loop 100,000 times, inserting a simple string into the DB each iteration. It completes correctly, but afterwards the database is not visible in the inspector, not even after refreshing the page. If I reload the page and reopen the database, I get a DOM exception. If I close Chrome, it hangs and I have to kill it.
Below is the code:
var testIndexedDB = {
    db: null,
    request: null,
    openDB: function () {
        var self = this;
        var request = this.request = indexedDB.open("web", 1);
        request.onupgradeneeded = function () {
            // The database did not previously exist, so create object stores and indexes.
            request.close();
            var db = request.result;
            var stories = db.createObjectStore("stories", {keyPath: "id"});
        };
        request.onsuccess = function () {
            self.db = request.result;
        };
        request.onerror = function (e) {
            console.log(e);
        };
        request.onblocked = function (e) {
            console.log('blocked');
        };
    },
    addItem: function (store, loid, text) {
        var db = this.db;
        var trans = db.transaction(store, "readwrite");
        var store = trans.objectStore(store);
        var request = store.put({
            "id": loid,
            "text": text
        });
        request.onsuccess = function (e) {
            // Re-render all the todos
        };
        request.onerror = function (e) {
            console.log(e.value);
        };
    },
    getItem: function (store, loid) {
        var db = this.db;
        var trans = db.transaction(store);
        var store = trans.objectStore(store);
        var request = store.get(loid);
        request.onsuccess = function (e) {
            console.log(request.result); // Refresh the screen
        };
        request.onerror = function (e) {
            console.log(e);
        };
    },
    removeItem: function (store, loid) {
        var db = this.db;
        var trans = db.transaction(store, "readwrite");
        var store = trans.objectStore(store);
        var request = store.delete(loid);
        request.onsuccess = function (e) {
            console.log('el deleted'); // Refresh the screen
        };
        request.onerror = function (e) {
            console.log(e);
        };
    },
    testSize: function () {
        var i = 0,
            t;
        while (i < 100000) {
            t = new Date().getTime();
            this.addItem('stories', i, t);
            i++;
        }
        console.log('items added');
    }
};
testIndexedDB.openDB();
Simply run testIndexedDB.testSize() to reproduce the issue.
How can I properly test continuous insertion, and why is this happening?
Thanks.
The main problem with your batch insert is that you are opening 100,000 transactions, and that blocks the database. I've made some optimizations to your code for batch inserting, and now the insert time is less than 5 s.
Instead of opening a separate transaction for every item, I group the items into arrays of 1,000 and then open one transaction per batch. The number of transactions is thus reduced to 100. Here are all the changes:
First I created a batch insert Function:
addItems: function (store, items) {
    var db = this.db;
    var trans = db.transaction(store, "readwrite"); // uses only one transaction per batch of 1000
    trans.oncomplete = function (e) {
        console.log('batch inserted');
    };
    var store = trans.objectStore(store);
    for (var i = 0; i < items.length; i++) {
        var request = store.put(items[i]);
        request.onerror = function (e) {
            console.log(e.value);
        };
    }
},
Then I changed the inserting function to send the data in batches:
testSize: function () {
    var i = 0,
        t,
        tempList = [];
    while (i < 100000) {
        t = new Date().getTime();
        tempList.push({
            "id": i,
            "text": t
        });
        if (tempList.length === 1000) { // items are grouped into arrays of 1000
            this.addItems('stories', tempList);
            tempList = [];
        }
        i++;
    }
    if (tempList.length > 0) { // flush any remaining partial batch
        this.addItems('stories', tempList);
    }
    console.log('items added');
}
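If the caller needs to know when every batch has actually committed (for example, before closing the database or reading the data back), one option is to return a Promise per batch; a sketch, where the method name addItemsAsync is my own invention:

// Sketch: one Promise per batch transaction, resolved on commit.
addItemsAsync: function (storeName, items) {
    var db = this.db;
    return new Promise(function (resolve, reject) {
        var trans = db.transaction(storeName, "readwrite");
        var store = trans.objectStore(storeName);
        for (var i = 0; i < items.length; i++) {
            store.put(items[i]);
        }
        trans.oncomplete = function () { resolve(items.length); };
        trans.onerror = function (e) { reject(e); };
    });
}

// Usage: collect these promises in testSize, then wait with
// Promise.all(batchPromises).then(function () { console.log('all committed'); });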