I have a jQuery function that reads a FileList and displays each image in an IMG HTML element:
function load_images(files) {
    for (var i = 0; i < files.length; i++) {
        // Validate the image type
        if (validate_file(files[i])) {
            var reader = new FileReader();
            reader.onload = function(e) {
                $(".upload_thumbnails").append(render_thumb(e.target.result, i)); // render_thumb returns a string with the img element
            };
            reader.readAsDataURL(files[i]);
        }
    }
}
But my images are not appended in the sequential order of the FileList. The FileList (var files) comes from a multiple file input HTML element.
Do you have any idea?
The readAsDataURL method is asynchronous, meaning your loop starts a number of load requests, but there is no way to know in which order the onload callbacks will be called. The behaviour is non-deterministic.
This could be solved by storing each result in an array along with its index and only rendering all the images once every one of them has loaded.
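A minimal sketch of that first approach (reusing the validate_file and render_thumb helpers from the question; the details are an assumption, not a drop-in fix): remember each result at its original index, count the outstanding loads, and append everything in FileList order once the last one finishes.

function load_images(files) {
    var results = [];                        // data URLs keyed by original index
    var pending = 0;

    for (let i = 0; i < files.length; i++) {
        if (!validate_file(files[i])) continue;
        pending++;
        let reader = new FileReader();
        reader.onload = function(e) {
            results[i] = e.target.result;    // keep the original position
            if (--pending === 0) {
                // everything has loaded: render in FileList order
                results.forEach(function(dataUrl, index) {
                    $(".upload_thumbnails").append(render_thumb(dataUrl, index));
                });
            }
        };
        reader.readAsDataURL(files[i]);
    }
}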
Another alternative is to create a placeholder div when each request is started and capture it in the closure of the onload callback. Then you can append the image to that div, which gives you the ordering you want.
Like this:
function load_images(files) {
    for (var i = 0; i < files.length; i++) {
        // Validate the image type
        if (validate_file(files[i])) {
            // the immediately-invoked function gives each iteration its own div and index
            (function(index) {
                var reader = new FileReader();
                var div = $("<div></div>");          // placeholder, appended in loop order
                $(".upload_thumbnails").append(div);
                reader.onload = function(e) {
                    div.append(render_thumb(e.target.result, index)); // render_thumb returns a string with the img element
                };
                reader.readAsDataURL(files[index]);
            })(i);
        }
    }
}
Related
I need your help with the following problem:
I have an HTML input which supports multiple files;
I upload, let's say, 5 files;
Each file is processed: it is read with readAsDataURL by a FileReader and the file's data is saved to an object (other params will be saved there too, which is why I use an object), and that object is pushed to an array.
After I run the flow I described, the length of the final array does NOT change.
I believe the problem is the asynchronous behaviour, but I cannot figure out how to change the code to make it work, which is why I'm asking for help. Please find the code below:
var controls = document.getElementById('controls');
function processUploadedFilesData(files) {
if (!files[0]) {
return;
};
var uploads = [];
for (var i = 0, length = files.length; i < length; i++) {
(function(file) {
var reader = new FileReader();
//I need object, as other params will be saved too in future;
var newFile = {};
reader.readAsDataURL(file);
reader.onloadend = function(e) {
newFile.data = e.target.result;
uploads.push(newFile);
}
})(files[i]);
}
return uploads;
}
controls.addEventListener('change', function(e) {
var uploadedFilesOfUser = processUploadedFilesData(e.target.files);
alert(uploadedFilesOfUser.length);
});
Codepen example - https://codepen.io/yodeco/pen/xWevRy
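For reference, a minimal sketch of one common way to restructure this flow (reusing the controls element and function names from the code above; the callback-based signature is an assumption, not from the original post): have the function accept a callback and invoke it only after the last reader has finished, instead of returning the array synchronously.

function processUploadedFilesData(files, done) {
    if (!files[0]) return;
    var uploads = [];
    var remaining = files.length;

    for (var i = 0; i < files.length; i++) {
        (function(file) {
            var reader = new FileReader();
            reader.onloadend = function(e) {
                uploads.push({ data: e.target.result });
                if (--remaining === 0) done(uploads);  // last file finished
            };
            reader.readAsDataURL(file);
        })(files[i]);
    }
}

controls.addEventListener('change', function(e) {
    processUploadedFilesData(e.target.files, function(uploadedFilesOfUser) {
        alert(uploadedFilesOfUser.length);  // now reports the real count
    });
});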
I am doing a simple text-file upload using FileReader.
var filesInput = document.getElementById("txtImport");
for (var i = 0; i < filesInput.files.length; i++) {
current = filesInput.files[i];
var reader = new FileReader();
reader.onload = function(file) {
return function(e) {
console.log('e', e) // not logging
}
}(current)
}
From what I've read about FileReader's onload, its result and its event parameter, I need to use a closure so I don't lose the loop's scope. When I click the button to trigger the upload, why does the log not come up? Why isn't the function firing?
You need to call one of the readAs___ methods of the FileReader:
https://developer.mozilla.org/en-US/docs/Web/API/FileReader
If you're reading multiple files in parallel, you need a separate reader for each.
Also, the parameter the event handler receives is an event object, not the contents of the file; the contents will be in reader.result.
for (var i = 0; i < filesInput.files.length; i++) {
let reader = new FileReader();
reader.onload = function(event) {
console.log(reader.result);
}
reader.readAsText(filesInput.files[i]);
}
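For completeness, a sketch of how that loop is typically wired to the file input's change event (assuming the same txtImport input from the question), so the FileList is read when the user actually picks files:

document.getElementById("txtImport").addEventListener("change", function() {
    for (var i = 0; i < this.files.length; i++) {
        let reader = new FileReader();            // one reader per file
        reader.onload = function(event) {
            console.log(event.target.result);     // same value as reader.result
        };
        reader.readAsText(this.files[i]);
    }
});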
I am facing an issue where I always get the last image in my image array, because of the asynchronous FileReader onloadend callback.
How can I get the base64 data for all images in my folder?
<input id="file-input" multiple webkitdirectory type="file" />
var input = document.getElementById('file-input');
var file_names = "";
var entries_length = 0;
var entries_count = 0;
var image = new Array();
var obj = {};
var j = 0;
input.onchange = function(e) {
var files = e.target.files; // FileList
entries_length = files.length;
console.log(files);
for (var i = 0, f; f = files[i]; ++i){
console.log("i:"+i);
entries_count = entries_count + 1;
//console.debug(files[i].webkitRelativePath);
if(files[i].type=="image/jpeg")
{
var string = files[i].webkitRelativePath;
var name = string.split("/")[3]; //this is because my image in 3rd dir in the folder
var reader = new FileReader();
reader.onloadend = function() {
obj.name = string.split("/")[3];
obj.image = reader.result;
image[j] = obj;
j = j+1;
}
reader.readAsDataURL(files[i]);
}
}
console.log(image);
}
The issue is caused by the asynchronous loading of the files. You iterate over the array, set the onloadend handler on each reader, and start loading by calling readAsDataURL.
The problem is that by the time your first image loads, the for loop has already completed, so the shared variables string, obj and reader hold the values from the last iteration.
At that point string (taken from files[i].webkitRelativePath) gives you the last filename, not the one you are expecting, and every callback overwrites the same obj.
Check the example for readAsDataURL on MDN to see one possible solution: each load is performed in a separate function, which preserves its own scope along with file.name. Do not be put off by the construction they use, [].forEach.call(files, readAndPreview); it is just a way to iterate over the files, since a FileList is not a regular array (so you cannot rely on it having a forEach method of its own).
So, it should be sufficient to wrap the loading logic in a function which takes the file object as a parameter:
var images = [];
function loadFile(f) {
var reader = new FileReader();
reader.onloadend = function () {
images.push({
name : f.name, // use whatever naming magic you prefer here
image : reader.result
});
};
reader.readAsDataURL(f);
}
for (var i=0; i<files.length; i++) {
loadFile(files[i]);
}
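As a side note, the [].forEach.call pattern mentioned above works just as well here; the for loop could be written as:

[].forEach.call(files, loadFile);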
Each call of the function 'remembers' the file object it was called with, and prevents the filename from getting messed up. If you are interested, read up on closures.
This also has the nice side effect of isolating your reader objects: although you create a 'new' reader on each iteration, var is function-scoped, so the single reader variable is shared by every callback. By the time an onloadend fires, that variable points at the last reader created, and reader.result is read from the wrong reader. Giving each call its own scope avoids that.
Now, you do not know how long it would take for all images to be loaded, so if you want to take an action right after that, you would have to perform a check each time an onloadend gets called. This is the essence of asynchronous behavior.
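A minimal sketch of that check, as a variant of the loadFile function above (the expected/loaded counters and the console.log are my additions, not part of the original answer):

var images = [];
var expected = 0;   // how many readers were started
var loaded = 0;

function loadFile(f) {
    expected++;
    var reader = new FileReader();
    reader.onloadend = function () {
        images.push({ name: f.name, image: reader.result });
        loaded++;
        if (loaded === expected) {
            // all readers have finished; safe to use the array now
            console.log("all images loaded", images);
        }
    };
    reader.readAsDataURL(f);
}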
As an aside, it is pointless to keep track of the next free index of images by hand with j; just use images.push({ name: "bla", image: "base64..." }). Managing indices manually only opens the door to bugs.
I have a page where the user can select a folder to upload files. Before sending the files, I need to read them and check the data. My code is organized as follows:
$( '#folder-select' ).on('change', getValidFileList);
var fileList = [];
function getValidFileList(event) {
    //Get the selected files
    var files = $( this ).get(0).files;
for(var i=0; i<files.length; i++) {
checkFile(files[i]);
}
//Do something with the final fileList
console.log(fileList);
};
var checkFile = function(file) {
var reader = new FileReader();
reader.onload = function (event) {
//Here I parse and check the data and if valid append it to fileList
};
reader.readAsArrayBuffer(file);
};
I would like to use the resulting fileList array to keep processing/displaying the uploaded files. I found that reader.onload is called asynchronously, so the console.log(fileList) after the for loop prints an empty array (it runs before any reader.onload has fired). Is there any way to wait until all files have been read and appended to fileList?
Just keep track of how many files have been processed compared to how many files were given:
function getValidFileList(files, callback) {
var count = files.length; // total number of files
var fileList = []; // accepted files
//Get the selected files
for(var i = 0; i < count; i++) { // invoke readers
checkFile(files[i]);
}
function checkFile(file) {
var reader = new FileReader();
reader.onload = function(event) {
var arrayBuffer = this.result;
//Here I parse and check the data and if valid append it to fileList
fileList.push(arrayBuffer); // or the original `file` blob..
if (!--count) callback(fileList); // when done, invoke callback
};
reader.readAsArrayBuffer(file);
}
};
The --count subtracts one for each reader's onload. When it reaches 0 (i.e. !count is true), the callback is invoked. Note that the resulting array's order may not match the order of files[n], if that matters to you (see the variation after the invocation example below).
Then invoke it like this:
$( '#folder-select' ).on('change', function() {
getValidFileList(this.files, onDone)
});
function onDone(fileList) {
// continue from here
}
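If the order does matter, a small variation (an assumption on my part, not part of the original answer) is to write each result into its original slot instead of pushing:

function getValidFileList(files, callback) {
    var count = files.length;
    var fileList = new Array(count);            // pre-sized, keeps FileList order

    for (var i = 0; i < count; i++) {
        (function(index) {
            var reader = new FileReader();
            reader.onload = function() {
                // parse/validate here as before, then store at the original index
                fileList[index] = this.result;
                if (!--count) callback(fileList);   // when done, invoke callback
            };
            reader.readAsArrayBuffer(files[index]);
        })(i);
    }
}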
In this javascript/jquery code I attempt to read multiple files and store them in a dictionary.
function handleFileSelect(evt) {
var files = evt.target.files; // FileList object
var f, filename;
for (var i = 0; i<files.length; i++) {
f = files[i];
filename = escape(f.name);
if (filename.toLowerCase().endsWith(".csv")) {
var reader = new FileReader();
// Closure to capture the file information.
reader.onload = (function(e) {
var text = reader.result;
var arrays = $.csv.toArrays(text);
frequencies[filename] = arrays;
generateMenuFromData();
});
// Read in the CSV file as text.
reader.readAsText(f);
}
}
}
I read only the .csv files. I want to run generateMenuFromData() only once, after the last reader.onload has run.
I can't find a good way to do this properly. Does anyone know how?
Thanks.
Increment a counter inside the load handler. When it equals the number of files you started reading, execute the function. A more structured approach would be to use promises (a sketch of that follows after this answer), but in this simple case a counter suffices:
function handleFileSelect(evt) {
    var files = evt.target.files;
    var f, filename, total = 0, loaded = 0;
    for (var i = 0; i < files.length; i++) {
        f = files[i];
        filename = escape(f.name);
        if (filename.toLowerCase().endsWith(".csv")) {
            total += 1;                        // count only the files we actually read
            var reader = new FileReader();
            reader.onload = (function(filename, reader) {
                return function(e) {
                    frequencies[filename] = $.csv.toArrays(reader.result);
                    loaded += 1;               // increase counter
                    if (loaded === total) {
                        // execute function once all .csv files are loaded
                        generateMenuFromData();
                    }
                };
            }(filename, reader));              // <-- new scope, "capture" variable values
            reader.readAsText(f);
        }
    }
}
Note that the real problem in your original code is the closure created inside the loop: when the load event handlers are eventually called, filename and reader refer to the values from the last iteration, because all handlers share the same variables. That is what the extra wrapping function above (the "new scope" comment) fixes.
See also Javascript closure inside loops - simple practical example.
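Since the answer mentions promises, here is a minimal sketch of the same flow built on Promise.all (readFileAsText is a hypothetical helper, not part of jQuery, jquery-csv or the original code):

function readFileAsText(file) {
    return new Promise(function(resolve, reject) {
        var reader = new FileReader();
        reader.onload = function() { resolve(reader.result); };
        reader.onerror = reject;
        reader.readAsText(file);
    });
}

function handleFileSelect(evt) {
    var csvFiles = Array.prototype.filter.call(evt.target.files, function(f) {
        return escape(f.name).toLowerCase().endsWith(".csv");
    });

    Promise.all(csvFiles.map(readFileAsText)).then(function(texts) {
        texts.forEach(function(text, i) {
            frequencies[escape(csvFiles[i].name)] = $.csv.toArrays(text);
        });
        generateMenuFromData();            // runs once, after the last file
    });
}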