How to save uploaded images to IndexedDB in JavaScript

I am using a file upload control to browse for images, and the selected images should be previewed on the page as thumbnails.
<input type="file" id="imageSelector" multiple="multiple" />

var uploadImageCtrl = document.querySelector('#imageSelector');
uploadImageCtrl.addEventListener('change', function () {
    var files = this.files;
    for (var i = 0; i < files.length; i++) {
        preview(files[i]);
    }
}, false);
After selecting a few images, the user goes to the next page and performs some action. When navigating back from that page, all of the image previews should still be there. I thought of saving these images to IndexedDB before going to the next page, but I am not sure how to write the IndexedDB code for this case.
Can anyone help me?

File objects are cloneable and can be saved to Indexed DB, either as records on their own or as part of other records.
This example just saves an array of files as a single record (key is "key") to an object store named "images":
// Call with array of images; callback will be called when done.
function save(array_of_files, callback) {
    openDB(function(db) {
        var tx = db.transaction('images', 'readwrite');
        tx.objectStore('images').put(array_of_files, 'key');
        tx.oncomplete = function() { callback(); };
        tx.onabort = function() { console.log(tx.error); };
    });
}

// Callback will be called with array of images, or undefined
// if not previously saved.
function load(callback) {
    openDB(function(db) {
        var tx = db.transaction('images', 'readonly');
        var req = tx.objectStore('images').get('key');
        req.onsuccess = function() {
            callback(req.result);
        };
    });
}

function openDB(callback) {
    var open = indexedDB.open('my_db');
    open.onupgradeneeded = function() {
        var db = open.result;
        db.createObjectStore('images');
    };
    open.onsuccess = function() {
        var db = open.result;
        callback(db);
    };
    open.onerror = function() { console.log(open.error); };
}
One possible gotcha: an HTMLInputElement's files property is not an Array itself but an array-like type called FileList. You can convert it to a real array with e.g. Array.from(input.files), but a FileList can itself be cloned (and therefore stored in IndexedDB), so this should "just work".
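Putting the pieces together, here is a minimal sketch of how the question's change handler might use the save/load helpers above. It assumes the preview() function from the question exists and that load() runs when the page is shown again; the console message is just illustrative.
var uploadImageCtrl = document.querySelector('#imageSelector');
uploadImageCtrl.addEventListener('change', function () {
    var files = this.files;               // a FileList; cloneable as-is
    for (var i = 0; i < files.length; i++) {
        preview(files[i]);                // thumbnail function from the question
    }
    save(files, function () {             // persist before navigating away
        console.log('images saved to IndexedDB');
    });
}, false);

// When the user comes back to the page, restore the previews.
load(function (files) {
    if (!files) return;                   // nothing was saved yet
    for (var i = 0; i < files.length; i++) {
        preview(files[i]);
    }
});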

Related

Loop multiple files from input, save each file readAsDataURL data to array

I need your help with the following problem:
I have an HTML input which supports multiple files;
I upload, let's say, 5 files;
Each file is processed: it is read with readAsDataURL by a FileReader, and the file's data is saved to an object (other params will be saved there too, which is why I use an object), which is pushed to an array.
After I run the flow I described, the length of the final array has NOT changed.
I believe the problem is in the async behaviour, but I cannot understand how I should change the code to make it work, which is why I am asking for help. Please find the code below:
var controls = document.getElementById('controls');

function processUploadedFilesData(files) {
    if (!files[0]) {
        return;
    }
    var uploads = [];
    for (var i = 0, length = files.length; i < length; i++) {
        (function(file) {
            var reader = new FileReader();
            // I need an object, as other params will be saved too in future
            var newFile = {};
            reader.readAsDataURL(file);
            reader.onloadend = function(e) {
                newFile.data = e.target.result;
                uploads.push(newFile);
            };
        })(files[i]);
    }
    return uploads;
}

controls.addEventListener('change', function(e) {
    var uploadedFilesOfUser = processUploadedFilesData(e.target.files);
    alert(uploadedFilesOfUser.length);
});
Codepen example - https://codepen.io/yodeco/pen/xWevRy
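One way to fix this (a sketch, assuming a Promise-capable browser; the readFileAsDataURL helper name is just for illustration) is to wrap each FileReader in a Promise and wait for all of them before using the array:
function readFileAsDataURL(file) {
    return new Promise(function (resolve, reject) {
        var reader = new FileReader();
        reader.onload = function (e) {
            resolve({ data: e.target.result });  // other params can be added to this object
        };
        reader.onerror = reject;
        reader.readAsDataURL(file);
    });
}

controls.addEventListener('change', function (e) {
    // FileList is array-like, so map over it with Array.prototype.map.call
    var reads = Array.prototype.map.call(e.target.files, readFileAsDataURL);
    Promise.all(reads).then(function (uploads) {
        alert(uploads.length);  // now reflects every selected file
    });
});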

Wait until all files are read asynchronously (FileReader) and then run code

I have a page where the user can select a folder to upload files. Before sending the files, I need to read them and check the data. My code is organized as follows:
$( '#folder-select' ).on('change', getValidFileList);

var fileList = [];

var getValidFileList = function(event) {
    // Get the selected files
    files = $( this ).get(0).files;
    for (var i = 0; i < files.length; i++) {
        checkFile(files[i]);
    }
    // Do something with the final fileList
    console.log(fileList);
};

var checkFile = function(file) {
    var reader = new FileReader();
    reader.onload = function (event) {
        // Here I parse and check the data and if valid append it to fileList
    };
    reader.readAsArrayBuffer(file);
};
I would like to take the resulting fileList array to keep processing/displaying the uploaded files. I found that reader.onload() is called asynchronously, so the result of the console.log(fileList) after the for loop is an empty array (it is executed before the reader.onload() is fired). Is there any way to wait until all files are read and appended to fileList?
Just keep track of how many files have been processed compared to how many files were given:
function getValidFileList(files, callback) {
    var count = files.length; // total number of files
    var fileList = [];        // accepted files

    // Get the selected files
    for (var i = 0; i < count; i++) { // invoke readers
        checkFile(files[i]);
    }

    function checkFile(file) {
        var reader = new FileReader();
        reader.onload = function(event) {
            var arrayBuffer = this.result;
            // Here I parse and check the data and if valid append it to fileList
            fileList.push(arrayBuffer);       // or the original `file` blob..
            if (!--count) callback(fileList); // when done, invoke callback
        };
        reader.readAsArrayBuffer(file);
    }
}
The --count subtracts one per reader onload hit. When it reaches 0 (or !count), the callback is invoked. Notice that the array order may not be the same as the order of files[n], if that matters to you.
Then invoke it like this:
$( '#folder-select' ).on('change', function() {
    getValidFileList(this.files, onDone);
});

function onDone(fileList) {
    // continue from here
}

Load multiple JSON files in pure JavaScript

I am new to JavaScript. I have already understood how to create an object from a JSON file with JSON.parse(), and now I need to load multiple local JSON files into an array. I have been googling my problem for a while, but everything I found was related to single JSON files.
Is there any way to do this in pure JavaScript, without any libraries like jQuery etc.?
P.S.: There is no need to work with a web server or anything else; the code runs locally.
To do this, you need to first get the actual files. Then, you should parse them.
// We need a function to load files.
// `done` is a "callback" function,
// so you call it once you're finished and pass whatever you want;
// in this case, we're passing the `responseText` of the XMLHttpRequest.
var loadFile = function (filePath, done) {
    var xhr = new XMLHttpRequest();
    xhr.onload = function () { return done(this.responseText); };
    xhr.open("GET", filePath, true);
    xhr.send();
};
// paths to all of your files
var myFiles = ["file1", "file2", "file3"];
// where you want to store the data
var jsonData = [];

// loop through each file
myFiles.forEach(function (file, i) {
    // and call loadFile;
    // note how a function is passed as the second parameter:
    // that's the callback function
    loadFile(file, function (responseText) {
        // we set jsonData[i] to the parsed data, since the requests
        // will not necessarily come back in order,
        // so we can't use jsonData.push(JSON.parse(responseText));
        // if the order doesn't matter, you can use push
        jsonData[i] = JSON.parse(responseText);
        // or you could choose not to store it in an array;
        // whatever you decide to do with it, it is available as
        // responseText within this scope (unparsed!)
    });
});
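The loop above fires all the requests but never signals when the last one has finished. A minimal variant that counts outstanding requests (the processAll callback is a hypothetical name, not from the original answer):
var remaining = myFiles.length;
myFiles.forEach(function (file, i) {
    loadFile(file, function (responseText) {
        jsonData[i] = JSON.parse(responseText);
        remaining--;
        if (remaining === 0) {
            processAll(jsonData);  // every file has been loaded and parsed at this point
        }
    });
});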
If you can't make an XMLHttpRequest, you can also use a FileReader object. Note that a FileReader reads File/Blob objects (for example from an <input type="file">), not path strings:
var loadLocalFile = function (file, done) {
    // `file` must be a File or Blob object, not a path string
    var fr = new FileReader();
    fr.onload = function () { return done(this.result); };
    fr.readAsText(file);
};
You can do something like this:
var file1 = JSON.parse(file1);
var file2 = JSON.parse(file2);
var file3 = JSON.parse(file3);
var myFileArray = [file1, file2, file3];
// Do other stuff
// ....
// Add another file to the array
var file4 = JSON.parse(file4);
myFileArray.push(file4);
If you already have an array of un-parsed files you could do this:
var myFileArray = [];
for (var i = 0; i < unparsedFileArray.length; i++) {
    myFileArray.push(JSON.parse(unparsedFileArray[i]));
}
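If the files are reachable over HTTP and the browser supports it, fetch with Promise.all is a more recent pure-JavaScript alternative; a sketch, with placeholder file names:
var myFiles = ["file1.json", "file2.json", "file3.json"];  // placeholder paths

Promise.all(myFiles.map(function (path) {
    return fetch(path).then(function (response) {
        return response.json();  // parse each response body as JSON
    });
})).then(function (jsonData) {
    // jsonData[i] corresponds to myFiles[i], already parsed
    console.log(jsonData);
});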

Uploading files to parse.com with javascript

I'm attempting to upload an array of files to parse using javascript with the following code:
html:
<fieldset>
    <input type="file" name="fileselect" id="fileselect" multiple />
    <input id="uploadbutton" type="button" value="Upload" />
</fieldset>
JS:
$('#uploadbutton').click(function () {
    var fileUploadControl = $("#fileselect")[0];
    if (fileUploadControl.files.length > 0) {
        var file = fileUploadControl.files[0];
        var name = "style.css";
        var parseFile = new Parse.File(name, file);
        var filesArray = [parseFile];
    }
    parseFile.save().then(function() {
        // The file has been saved to Parse.
    }, function(error) {
        // The file either could not be read, or could not be saved to Parse.
    });
    var newStore = new Parse.Object("FileStore");
    newStore.set("files", filesArray);
    newStore.save();
});
I am uploading to a class called FileStore with a key "files", which is currently set to an array, and I would like it to hold an array of files. Is this the best way to go about uploading multiple files to Parse? The code isn't working for me right now. My aim is to have multiple files associated with each object in my class.
Be careful with async code. Your code currently has a race condition and will most likely fail, because you call newStore.save() before parseFile.save() has finished.
Those three lines dealing with newStore should be inside the success handler for parseFile.save(), e.g.:
parseFile.save().then(function() {
    // The file has been saved to Parse.
    var newStore = new Parse.Object("FileStore");
    newStore.set("files", filesArray);
    newStore.save();
}, function(error) {
    // The file either could not be read, or could not be saved to Parse.
});
When you get to saving multiple files, you'll need to wait for all of them to finish before moving to the next step. You can chain your promises together to run in series or in parallel.
For what you want, parallel works fine:
var fileSavePromises = [];

// assuming some code that creates each file has stored the Parse.File objects
// in an array called "filesToSave" but not yet called save
// (_.each is Underscore's iterator; a plain for loop would work as well)
_.each(filesToSave, function(file) {
    fileSavePromises.push(file.save());
});

Parse.Promise.when(fileSavePromises).then(function() {
    // all files have saved now, do other stuff here
    var newStore = new Parse.Object("FileStore");
    newStore.set("files", filesToSave);
    newStore.save();
});
I have managed to solve this with the help of @Timothy's code; the full code after the edit:
var fileUploadControl = $("input[type=file]")[0];
var filesToSave = fileUploadControl.files;
var fileSavePromises = [];

// create a Parse.File for each selected file and collect the save promises
_.each(filesToSave, function(file) {
    var parseFile = new Parse.File("photo.jpg", file);
    fileSavePromises.push(
        parseFile.save().then(function() {
            // The file has been saved to Parse.
            $.post("/i", {
                file: {
                    "__type": "File",
                    "url": parseFile.url(),
                    "name": parseFile.name()
                }
            });
        })
    );
});

Parse.Promise.when(fileSavePromises).then(function() {
    // all files have saved now, do other stuff here
    window.location.href = "/";
});

Recall CollectionFS file using Metadata (Meteor)

I am creating an application that allows uploading and displaying of images. Users can upload images, and then all of the images are displayed on one page. The biggest issue is that every image needs to be displayed under the name of its owner.
I am using CollectionFS to upload and store the files; this is my storeFile handler:
Template.queueControl.events({
    'change .fileUploader': function (e) {
        var files = e.target.files;
        for (var i = 0, f; f = files[i]; i++) {
            console.log(Meteor.user().username);
            ImageFS.storeFile(f, {username: Meteor.user().username});
        }
    }
});
I thought I would be able to retrieve the files with a query like this:
Template.studentModal.getImage = function() {
    return ImageFS.find({username: Session.get("studentUsername")});
};
Setting and getting the username works as expected, but the query does not return any records.
Thanks,
Skylar
You should convert the file to an FS.File before storing it; then you can add metadata such as the owner to the image.
This should work, at least in Meteor 0.9:
Template.queueControl.events({
    'change .fileUploader': function (e) {
        var files = e.target.files;
        for (var i = 0, f; f = files[i]; i++) {
            var newFile = new FS.File(files[i]);
            newFile.owner = Meteor.user().username;
            Images.insert(newFile, function(err, fileObj) {});
        }
    }
});
You should store the userId and subscribe to a publication that publishes the shown users with just their usernames.
I use https://github.com/englue/meteor-publish-composite for publication composition.
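A minimal sketch of what that publication could look like with publish-composite, assuming each image document stores a userId field and that Images is the collection from the snippet above (the publication name is illustrative):
// Server-side: publish the images together with just the usernames of their owners.
Meteor.publishComposite('imagesWithOwners', function () {
    return {
        find: function () {
            return Images.find();
        },
        children: [{
            find: function (image) {
                // assumes each image stores the owner's userId
                return Meteor.users.find(
                    { _id: image.userId },
                    { fields: { username: 1 } }
                );
            }
        }]
    };
});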
