Only allow one image at a time with the File API?
What's the correct way to restrict this to only one image at a time?
<input id="browse" type="file" multiple>
<div id="imgs"></div>
<style type="text/css">
#imgs {
height: imageheight;
width: imagewidth;
position: absolute;
top: 39px;
left: 9px;
}
</style>
<script type="text/javascript">
window.URL = window.URL || window.webkitURL;
var elBrowse = document.getElementById("browse"),
elPic = document.getElementById("imgs"),
useBlob = false && window.URL;
function readImage(file) {
var reader = new FileReader();
reader.addEventListener("load", function() {
var image = new Image();
image.addEventListener("load", function() {
elPic.appendChild(this);
});
image.src = useBlob ? window.URL.createObjectURL(file) : reader.result;
});
reader.readAsDataURL(file);
}
elBrowse.addEventListener("change", function() {
var files = this.files; {
for (var i = 0; i < files.length; i++) {
var file = files[i];
if ((/\.(png|jpeg|jpg|gif)$/i).test(file.name)) {
readImage(file);
}
}
}
});
</script>
One solution could be a promise chain like this:
function readImage(file) {
//wrap readImage body into a promise
return new Promise((resolve) => {
var reader = new FileReader();
reader.addEventListener("load", function () {
var image = new Image();
image.addEventListener("load", function () {
elPic.appendChild(this);
//resolve the promise after the child is appended so the caller would know when to start the next one
resolve();
});
image.src = useBlob ? window.URL.createObjectURL(file) : reader.result;
});
reader.readAsDataURL(file);
});
}
elBrowse.addEventListener("change", function () {
var files = this.files; {
//start the chain
var chain = Promise.resolve();
for (var i = 0; i < files.length; i++) {
//use let to have properly scoped variable
let file = files[i];
if ((/\.(png|jpeg|jpg|gif)$/i).test(file.name)) {
//append functions to call to the chain
chain = chain.then(() => readImage(file));
}
}
}
});
The call to the next readImage() happens only after resolve() is called in the previous one, i.e. after the image has loaded and been appended.
You might also want to consider the loadend event: it fires even when the read fails, so a failed file won't break your chain.
https://developer.mozilla.org/en-US/docs/Web/Events/loadend
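A sketch of one way to fold that in, reusing the elPic and useBlob variables from the snippet above; loadend fires after load, error, or abort alike, and reader.error is only set when the read did not succeed, so the promise also resolves when a file can't be read:
function readImage(file) {
  return new Promise((resolve) => {
    var reader = new FileReader();
    reader.addEventListener("load", function () {
      var image = new Image();
      image.addEventListener("load", function () {
        elPic.appendChild(this);
        resolve();
      });
      // a broken image would otherwise leave the promise pending
      image.addEventListener("error", resolve);
      image.src = useBlob ? window.URL.createObjectURL(file) : reader.result;
    });
    reader.addEventListener("loadend", function () {
      // the read failed or was aborted; resolve so the chain can continue
      if (reader.error) resolve();
    });
    reader.readAsDataURL(file);
  });
}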
I started a new approach that leads to the desired result.
<body style="margin:8px">
<img style="position:absolute; top:39px; left:9px"
height="imageheight" width="imagewidth">
<input type="file" onchange="previewFile()">
<script type="text/javascript">
function previewFile() {
var preview = document.querySelector('img');
var file = document.querySelector('input[type=file]').files[0];
var reader = new FileReader();
reader.addEventListener("load", function () {
preview.src = reader.result;
}, false);
if ((/\.(png|jpeg|jpg|gif)$/i).test(file.name)) {
reader.readAsDataURL(file); }
}
</script>
Related
I am working with FileReader and I have come across an issue where FileReader's onload handler is not executed synchronously with the rest of the code. Basically I have 2 functions like these:
imageExists(url, callback) {
var img = new Image();
img.onload = function () { callback(true); };
img.onerror = function () { callback(false); };
img.src = url;
}
isImageCorrupt(file): boolean {
var reader = new FileReader();
var isCorrupt = false;
reader.readAsDataURL(file);
reader.onload = (e) => {
this.imageExists(e.target.result, (exists) => {
if (exists) {
isCorrupt = false;
// Image is not corrupt
} else {
isCorrupt = true;
//Image is corrupt
}
});
};
return isCorrupt;
}
The isImageCorrupt() function triggers reader.onload, which calls imageExists() with a callback; imageExists() in turn relies on the image's own onload handler.
The problem is that by the time isImageCorrupt() returns on its last line, reader.onload has not yet updated isCorrupt, so the returned value is always false.
I want the function to wait for reader.onload to finish before returning the value.
maybe something like this?
isImageCorrupt(file): Promise<Boolean> {
return new Promise((resolve) => {
var reader = new FileReader();
reader.onload = (e) => {
var img = new Image();
img.onload = function () {
resolve(false);
};
img.onerror = function () {
resolve(true);
};
img.src = <string>e.target.result;
}
reader.readAsDataURL(file);
});
}
*disclaimer: I did not test it
You could use Promises. The code could still be refactored further using async/await.
isImageCorrupt(file): Promise<boolean> {
return new Promise<boolean>((resolve,reject)=>{
var reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = ()=>{
var img = new Image();
img.src = reader.result;
img.onload = function () {
resolve(false);
};
img.onerror = function () {
resolve(true);
};
}
reader.onerror=()=>{reject(true)}
});
};
isImageCorrupt(yourFile).then((result) => { /* HERE USE RESULT */ }, (error) => { /* HERE USE ERROR RESULT */ });
However, you shouldn't resolve with true or false; instead, resolve if the image is OK and reject otherwise, without a boolean value, like this:
isImageCorrupt(file): Promise<void> {
return new Promise<void>((resolve,reject)=>{
var reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = ()=>{
var img = new Image();
img.src = reader.result;
img.onload = function () {
resolve();
};
img.onerror = function () {
reject();
};
}
reader.onerror=()=>{reject()}
});
};
isImageCorrupt(yourFile).then(() => { /* HERE OK */ }, () => { /* HERE THERE WAS SOME ERROR/PROBLEM */ });
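Since the answer mentions refactoring with async/await, here is a minimal sketch of consuming the rejecting variant that way (handleFile is just an illustrative wrapper name):
async function handleFile(yourFile) {
  try {
    await isImageCorrupt(yourFile);
    // resolved: the file was read and the image decoded fine
  } catch (e) {
    // rejected: the read failed or the image is corrupt
  }
}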
I am new to coding in JavaScript. As an exercise, I have been tasked with using some pre-existing functions to create an image item from a given URL. I need to read the image, store it in local storage and then display it on the screen. The only function I need to add to is processFile. I am stuck and cannot get it to display the image; I get an error saying that the path does not exist. Below is my code. Any guidance on what I am doing wrong? I feel like I am very close but doing something fundamentally wrong.
function addImageEntry(key, url) {
var imgElement = new Image();
imgElement.alt = "Photo entry";
imgElement.src = url;
addSection(key, imgElement);
}
function makeItem(type, data) {
var itemObject = { type: type, data: data };
return JSON.stringify(itemObject);
}
function processFile(event) {
function addImage(url) {
var key = "diary" + Date.now();
addImageEntry(key, url);
var imgElement = new Image();
imgElement.src = url;
var item = makeItem("image", imgElement.src);
localStorage.setItem(key, item);
}
var inputElement = event.target;
var fileObject = inputElement.files[0];
var reader = new FileReader();
reader.addEventListener("load",addImage);
url = reader.readAsDataURL(fileObject);
event.target.value = "";
}
reader.readAsDataURL(fileObject) doesn't return the image data, so url = reader.readAsDataURL(fileObject) won't store it.
The data ends up in reader.result once the load event fires.
Use imgElement.src = reader.result.
document.querySelector('input[type="file"]').addEventListener('change', processFile)
function processFile(event) {
var fileObject = event.target.files[0]
var reader = new FileReader()
reader.addEventListener('load', addImage)
reader.readAsDataURL(fileObject)
function addImage() {
var imgElement = new Image()
imgElement.src = reader.result
document.body.append(imgElement)
}
}
Amend:
function addImage(event) {
// use reader.result is ok too
localStorage.setItem('myimage', event.target.result)
}
function addImageEntry() {
var imgElement = new Image()
imgElement.src = localStorage.getItem('myimage')
imgElement.style.display = 'block'
document.body.append(imgElement)
}
I'm trying to make a small snippet to preview images before uploading them:
$.fn.previewImg=function($on){
var input = this;
try{
if (this.is("input[type='file']")) {
input.change(function(){
var reader = new FileReader();
reader.onloadend = function(){
for (var i = 0; i < $on.length; i++) {
if (/img/i.test($on[i].tagName)) $on[i].src = reader.result;
else $on[i].style.bakgroundImage = "url("+reader.result+")";
}
};
});
}else throw new exception("Trying to preview image from an element that is not a file input!");
}catch(x){
console.log(x);
}
};
I'm calling it like:
$("#file").previewImg($(".preview_img"));
but the onloadend function is never called.
Actually, you have to give the FileReader the selected file and instruct it to read it.
Below is the corrected code.
$.fn.previewImg=function($on){
var input = this;
try{
if (this.is("input[type='file']")) {
input.change(function(evt){
var reader = new FileReader();
console.log("Input changed");
reader.onloadend = function(){
console.log("onloadend triggered");
for (var i = 0; i < $on.length; i++) {
if (/img/i.test($on[i].tagName)) $on[i].src = reader.result;
else $on[i].style.backgroundImage = "url("+reader.result+")";
}
};
//get the selected file
var files = evt.target.files;
//instruct reader to read it
reader.readAsDataURL(files[0]);
});
} else throw new Error("Trying to preview image from an element that is not a file input!");
}catch(x){
console.log(x);
}
};
$("#file").previewImg($(".preview_img"));
I have an Angular controller that should work with images. It has a watcher for the file property on the scope. If the property contains an array of files, the first file should be read by FileReader, converted to a base64 string and added to the page. Something like this:
$scope.$watch('file', function (files) {
if (files && files.length > 0) {
if (files[0].type && files[0].type.match('image.*')) {
var reader = new FileReader();
reader.onload = function (e) {
render(e.target.result);
};
reader.readAsDataURL(files[0]);
}
}
});
and render function:
function render (src) {
var image = new Image();
image.addEventListener('load', function () {
if (image.width < MIN_SIZE || image.height < MIN_SIZE) {
$scope.error = $filter('localize')('UploaderWindow_ImageSizeError');
$scope.$apply();
} else {
new global.ICropper('original-image', {
gap: 0,
keepSquare: true,
image: src,
preview: ['cropped-image']
});
}
});
image.addEventListener('error', function () {
$scope.error = $filter('localize')('UploaderWindow_SelectImage');
$scope.$apply();
});
image.src = src;
};
ICropper should create an img element in the DOM with the base64 string in its src attribute.
The problem shows up in the unit test I have for this functionality. Test case:
it('Should render new image from file input', function () {
var imageData = image.split(',')[1],
imageType = image.split(',')[0].replace('data:', '').replace(';base64', ''),
file = base64toBlob(imageData, imageType);
expect(originalImage.lastElementChild).toBe(null);
runs(function () {
$scope.file = [file];
$scope.$apply();
});
waitsFor(function () {
return originalImage.lastElementChild;
}, 'waiting_original_form');
runs(function () {
expect(originalImage.lastElementChild.src).toBe(image);
});
});
The image variable contains a valid base64 string, and originalImage.lastElementChild is the img element that ICropper should create. The body of the base64toBlob function:
function base64toBlob (b64Data, contentType) {
var binary = atob(b64Data.replace(/\s/g, '')),
binaryLength = binary.length,
buffer = new ArrayBuffer(binaryLength),
view = new Uint8Array(buffer);
for (var i = 0; i < binaryLength; i++) {
view[i] = binary.charCodeAt(i);
}
return new Blob([view], {type: contentType});
}
This test successfully passed in Chrome, but not in PhantomJS:
timeout: timed out after 5000 msec waiting for waiting_original_form
I think it's because the image's load event is never fired and error fires instead, but I don't understand why. I know that Blob is not defined in PhantomJS, so I use this polyfill: https://github.com/eligrey/Blob.js
I'm using the FileReader API to read multiple files.
<html> <body>
<input type="file" id="filesx" name="filesx[]"
onchange="readmultifiles(this.files)" multiple=""/>
<div id="bag"><ul/></div>
<script>
window.onload = function() {
if (typeof window.FileReader !== 'function') {
alert("The file API isn't supported on this browser yet.");
}
}
function readmultifiles(files) {
var ul = document.querySelector("#bag>ul");
while (ul.hasChildNodes()) {
ul.removeChild(ul.firstChild);
}
function setup_reader(file) {
var name = file.name;
var reader = new FileReader();
reader.onload = function(e) {
var bin = e.target.result; //get file content
// do sth with text
var li = document.createElement("li");
li.innerHTML = name;
ul.appendChild(li);
}
reader.readAsBinaryString(file);
}
for (var i = 0; i < files.length; i++) { setup_reader(files[i]); }
}
</script> </body> </html>
The problem is that all files are read at the same time, and when their total size is very large, the browser crashes.
I want to read one file after another, so that the memory consumption is reduced.
Is this possible?
I came up with a solution myself which works.
function readmultifiles(files) {
var reader = new FileReader();
function readFile(index) {
if( index >= files.length ) return;
var file = files[index];
reader.onload = function(e) {
// get file content
var bin = e.target.result;
// do sth with bin
readFile(index+1)
}
reader.readAsBinaryString(file);
}
readFile(0);
}
I'm updating this question for the benefit of new users who are looking for a way to upload multiple files via the FileReader API, especially using modern ES syntax.
Rather than manually indexing into the FileList, it's much simpler and cleaner to iterate with Object.keys(files):
<input type="file" onChange="readmultifiles" multiple/>
<script>
function readmultifiles(e) {
const files = e.currentTarget.files;
Object.keys(files).forEach(i => {
const file = files[i];
const reader = new FileReader();
reader.onload = (e) => {
//server call for uploading or reading the files one-by-one
//by using 'reader.result' or 'file'
}
reader.readAsBinaryString(file);
})
};
</script>
This should read the files one by one:
function readmultifiles(files) {
var ul = document.querySelector("#bag>ul");
while (ul.hasChildNodes()) {
ul.removeChild(ul.firstChild);
}
// Read first file
setup_reader(files, 0);
}
// Don't define functions in functions in functions, when possible.
function setup_reader(files, i) {
var file = files[i];
var name = file.name;
var reader = new FileReader();
reader.onload = function(e){
readerLoaded(e, files, i, name);
};
reader.readAsBinaryString(file);
// After reading, read the next file.
}
function readerLoaded(e, files, i, name) {
  // get file content
  var bin = e.target.result;
  // do sth with text
  // ul is not in scope here, so look it up again
  var ul = document.querySelector("#bag>ul");
  var li = document.createElement("li");
  li.innerHTML = name;
  ul.appendChild(li);
  // If there's a file left to load
  if (i < files.length - 1) {
    // Load the next file
    setup_reader(files, i + 1);
  }
}
Define the input using the multiple attribute:
<input onchange='upload(event)' type='file' multiple/>
Define the upload function:
const upload = async (event) => {
// Convert the FileList into an array and iterate
let files = Array.from(event.target.files).map(file => {
// Define a new file reader
let reader = new FileReader();
// Create a new promise
return new Promise(resolve => {
// Resolve the promise after reading file
reader.onload = () => resolve(reader.result);
// Read the file as a text
reader.readAsText(file);
});
});
// At this point you'll have an array of results
let res = await Promise.all(files);
}
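If the reads genuinely have to happen one after another, as the question asks, here is a sketch of the same idea with a for...of loop that awaits each file before starting the next (uploadSequential is just an illustrative name):
const uploadSequential = async (event) => {
  for (const file of event.target.files) {
    // Wrap a single read in a promise and wait for it before moving on
    const text = await new Promise((resolve, reject) => {
      const reader = new FileReader();
      reader.onload = () => resolve(reader.result);
      reader.onerror = () => reject(reader.error);
      reader.readAsText(file);
    });
    // use `text` here, one file at a time
  }
};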
My complete solution is here:
<html> <body>
<input type="file" id="filesx" name="filesx[]"
onchange="readmultifiles(this.files)" multiple=""/>
<div id="bag"></div>
<script>
window.onload = function() {
if (typeof window.FileReader !== 'function') {
alert("The file API isn't supported on this browser yet.");
}
}
function readmultifiles(files) {
  var reader = new FileReader();
  // one list for all file names
  var ul = document.createElement("ul");
  document.getElementById('bag').appendChild(ul);
  function readFile(index) {
    if (index >= files.length) return;
    var file = files[index];
    reader.onload = function(e) {
      // get file content
      var bin = e.target.result;
      // do sth with bin, then list the file name
      var li = document.createElement("li");
      li.innerHTML = file.name;
      ul.appendChild(li);
      // only start the next read once this one has finished
      readFile(index + 1);
    }
    reader.readAsBinaryString(file);
  }
  readFile(0);
}
</script> </body> </html>
I implemented another solution using modern JS (Map, Iterator). I adapted the code from my Angular application (originally written with some TS features).
Like Steve KACOU mentioned, we create a different FileReader instance for each file.
<input type="file" id="filesx" name="filesx[]"
onchange="processFileChange(this)" multiple=""/>
function processFileChange(event) {
if (event.target.files && event.target.files.length) {
const fileMap = new Map();
for (let i = 0; i < event.target.files.length; i++) {
const file = event.target.files[i];
const fileReader = new FileReader();
fileMap.set(fileReader, file);
}
const mapEntries = fileMap.entries();
readFile(mapEntries);
}
}
function readFile(mapEntries) {
const nextValue = mapEntries.next();
if (nextValue.done === true) {
return;
}
const [fileReader, file] = nextValue.value;
fileReader.readAsDataURL(file);
fileReader.onload = () => {
// Do black magic for each file here (using fileReader.result)
// Read the next file
readFile(mapEntries);
};
}
Basically this takes advantage of passing the entries iterator along, so its state is preserved and each call to readFile picks up the next file. This makes the code quite easy to read, in my opinion.
Taking the best parts of these answers.
<input type="file" onchange="readmultifiles(this.files)" multiple />
<script>
function readmultifiles(files) {
for (const file of files) {
const reader = new FileReader();
reader.readAsBinaryString(file);
reader.fileName = file.name;
reader.onload = (event) => {
const fileName = event.target.fileName;
const content = event.currentTarget.result;
console.log({ fileName, content });
};
}
}
</script>
You must instantiate a FileReader for each file to read.
function readFiles(event) {
//Get the files
var files = event.target.files || [];
if (files.length) {
for (let index = 0; index < files.length; index++) {
//instantiate a FileReader for the current file to read
const reader = new FileReader(); // const keeps each reader scoped to its own iteration
reader.onload = function() {
var result = reader.result;
console.log(result); //File data
};
reader.readAsDataURL(files[index]);
}
}
}
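A minimal way to wire this up, assuming the handler is attached directly to a file input and reads the selection from event.target.files as above:
<input type="file" multiple onchange="readFiles(event)">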
Try this
const setFileMultiple = (e) => {
e.preventDefault();
//Get the files
let files = [...e.target.files];
files.forEach((item) => {
  let reader = new FileReader();
  reader.onloadend = () => {
    console.log("result", reader.result);
  };
  reader.readAsDataURL(item);
});
};