On the client side, how can I use the native browser filesystem API to embed base64 image data within an IMG element?
function createIMGFromFS() {
  var img = document.createElement("img");
  const openFile = async () => {
    const [fh] = await window.showOpenFilePicker();
    var file2 = await fh.getFile();
    var name = file2.name;
    var fr = new FileReader();
    fr.readAsDataURL(file2);
    fr.onloadend = function () {
      img.src = fr.result;
    }
  }
  const file = openFile();
  return img;
}
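For reference, here is a minimal sketch of one way to do this, keeping everything async so the img element is only returned once the data URL is ready (note that showOpenFilePicker() is currently Chromium-only):

async function createImgFromFS() {
  // showOpenFilePicker() is only available in Chromium-based browsers
  const [handle] = await window.showOpenFilePicker();
  const file = await handle.getFile();
  // wrap FileReader in a promise so the base64 data: URL can be awaited
  const dataUrl = await new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(reader.result);
    reader.onerror = () => reject(reader.error);
    reader.readAsDataURL(file);
  });
  const img = document.createElement("img");
  img.src = dataUrl;
  return img;
}

// usage (inside an async function): document.body.appendChild(await createImgFromFS());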
I am building a plugin for FIGMA where the user selects multiple images, which I save in an array and then send off to be interpreted.
I have two issues with my code:
1. img.src = bin; does not trigger img.onload, but if I set img.src = "literal string", the onload method works.
2. The imageData array sent at the end is undefined, which I assume is due to my poor understanding of async functions.
I would appreciate your help figuring this out. Thank you.
P.S. This is pure JavaScript, and you don't need to know FIGMA to follow the code.
<input type="file" id="images" accept="image/png" multiple />
<button id="button">Create image</button>
<script>
const button = document.getElementById('button');
button.onclick = () => {
const files = document.getElementById('images').files;
function readFile(index) {
console.log(1);
if (index >= files.length) {return};
console.log(2);
const file = files[index];
const imageCaption = file.name;
var reader = new FileReader();
reader.readAsArrayBuffer(file);
reader.onload = function (e) {
console.log(4);
// get file content
const bin = e.target.result;
const imageBytes = new Uint8Array(bin);
//Get Image width and height
var img = new Image();
img.src = bin;
img.onload = function () {
console.log(6);
const width = img.width;
const height = img.height;
console.log("imageCaption: " + imageCaption);
console.log("width: " + width);
console.log("height: " + height);
console.log("imageBytes: " + imageBytes);
var data = {imageBytes, imageCaption, width, height};
//Read Next File
nextData = readFile(index + 1);
if( nextData ) {
data.concat(nextData)
}
return data;
}
}
}
//Call function to Read and Send Images
const imageData = readFile(0);
//Send Data
parent.postMessage({
pluginMessage: {
type: 'send-image',
imageData,
}
}, '*');
A friend ended up helping me with it. Thank you Hiba!
const button = document.getElementById('button');
const input = document.getElementById('input');

button.addEventListener('click', async () => {
  const files = input.files ? [...input.files] : [];
  const data = await Promise.all(
    files.map(async (file) => await getImageData(file))
  );
  console.log(data);
});

async function getImageData(file) {
  // get binary data from file
  const bin = await file.arrayBuffer();
  // translate bin data into bytes
  const imageBytes = new Uint8Array(bin);
  // create data blob from bytes
  const blob = new Blob([imageBytes], { type: "image/png" });
  // create html image element and assign blob as src
  const img = new Image();
  img.src = URL.createObjectURL(blob);
  // get width and height from rendered image
  await img.decode();
  const { width, height } = img;
  const data = { image: blob, caption: file.name, width, height };
  return data;
}
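If you only need the pixel dimensions rather than a rendered element, a possible alternative is createImageBitmap(), which decodes the blob without touching the DOM. A rough sketch of the same getImageData built on it:

async function getImageData(file) {
  const blob = new Blob([await file.arrayBuffer()], { type: "image/png" });
  // createImageBitmap decodes the image off-DOM, no <img> element needed
  const bitmap = await createImageBitmap(blob);
  const data = { image: blob, caption: file.name, width: bitmap.width, height: bitmap.height };
  bitmap.close(); // free the decoded pixel data
  return data;
}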
I am new to coding in JavaScript. As an exercise, I have been tasked with using some pre-existing functions to create an image item from a given URL. I need to read the image, store it in local storage and then display it on the screen. The only function I need to add to is processFile. I am stuck and cannot get it to display the image; I get an error saying that the path does not exist. Below is my code, any guidance as to what I am doing wrong? I feel like I am very close but doing something fundamentally wrong.
function addImageEntry(key, url) {
  var imgElement = new Image();
  imgElement.alt = "Photo entry";
  imgElement.src = url;
  addSection(key, imgElement);
}

function makeItem(type, data) {
  var itemObject = { type: type, data: data };
  return JSON.stringify(itemObject);
}

function processFile(event) {
  function addImage(url) {
    var key = "diary" + Date.now();
    addImageEntry(key, url);
    var imgElement = new Image();
    imgElement.src = url;
    var item = makeItem("image", imgElement.src);
    localStorage.setItem(key, item);
  }

  var inputElement = event.target;
  var fileObject = inputElement.files[0];
  var reader = new FileReader();
  reader.addEventListener("load", addImage);
  url = reader.readAsDataURL(fileObject);
  event.target.value = "";
}
reader.readAsDataURL(fileObject) does not return the image data, so url = reader.readAsDataURL(fileObject) won't store it.
The data ends up in reader.result once the load event fires.
Use imgElement.src = reader.result.
document.querySelector('input[type="file"]').addEventListener('change', processFile)
function processFile(event) {
var fileObject = event.target.files[0]
var reader = new FileReader()
reader.addEventListener('load', addImage)
reader.readAsDataURL(fileObject)
function addImage() {
var imgElement = new Image()
imgElement.src = reader.result
document.body.append(imgElement)
}
}
Amend:
function addImage(event) {
// use reader.result is ok too
localStorage.setItem('myimage', event.target.result)
}
function addImageEntry() {
var imgElement = new Image()
imgElement.src = localStorage.getItem('myimage')
imgElement.style.display = 'block'
document.body.append(imgElement)
}
(I'm testing with Protractor and Jasmine, and I've also included image-diff and node-canvas in my project.)
I need to compare two images and make sure they are the same. One is saved in my file structure and the other is created from canvas. I am able to convert the canvas to image and also load the image from the file. Here's what I have:
var imagediff = require('../node_modules/js-imagediff/js/imagediff.js');
var Canvas = require('canvas');
var fs = require('fs');
var path = require('path');

beforeEach(function () {
  jasmine.addMatchers(imagediff.jasmine);
});

function loadImage(url, callback) {
  var image;
  fs.readFile(url, function (error, data) {
    if (error) throw error;
    image = new Canvas.Image();
    image.onload = function () {
      callback(image);
    };
    image.src = data;
  });
  return image;
}

it('should work', function () {
  // executeScript is needed to get the canvas element and convert it
  browser.driver.executeScript(function () {
    var can = document.querySelector('#workspace-canvas');
    var ctx = can.getContext("2d");
    var data = can.toDataURL("image/png");
    var img = new Image();
    img.src = data;
    // this code below shows that the image was converted properly
    // var link = document.createElement('a');
    // link.href = img.src;
    // link.download = 'image1.png';
    // link.click();
    return data;
  }).then(function (result) {
    newData = result;
    var imgPath = path.join(__dirname, './images/image1.png');
    loadImage(imgPath, function (image) {
      console.log('loadImage');
      var canvas = new Canvas();
      canvas.width = image.width;
      canvas.height = image.height;
      var ctx = canvas.getContext("2d");
      ctx.drawImage(image, 0, 0);
      var data = canvas.toDataURL("image/png");
      oldData = data;
      // TODO: do a comparison here!
      done();
    });
  });
});
My question is how do I compare the two images and make sure they are the same? I thought comparing the data URI would work but it doesn't. I'd really like to use image-diff but I'm not sure how to do that. Please help me!
The imagediff API has an imagediff.equal(a, b) function that takes two Image objects, which should help in this case.
If that doesn't help, you could always MD5-hash both images and make sure the checksums are equal.
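Note that the checksum route only proves the files are byte-for-byte identical; two PNGs with the same pixels but different encoders will not match. A rough sketch using Node's built-in crypto module (md5OfFile is just a hypothetical helper name):

var crypto = require('crypto');
var fs = require('fs');

function md5OfFile(filePath) {
  // hash the raw file bytes and return a hex checksum
  return crypto.createHash('md5').update(fs.readFileSync(filePath)).digest('hex');
}

// expect(md5OfFile(pathToImageA)).toEqual(md5OfFile(pathToImageB));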
Here's how I ended up doing it. I used fs.writeFile to save the new image to my project and then load that image and compare both images with image-diff.
var imagediff = require('../node_modules/js-imagediff/js/imagediff.js');
var Canvas = require('canvas');
var fs = require('fs');
var path = require('path');

beforeEach(function () {
  jasmine.addMatchers(imagediff.jasmine);
});

function loadImage(url, callback) {
  var image;
  fs.readFile(url, function (error, data) {
    if (error) throw error;
    image = new Canvas.Image();
    image.onload = function () {
      callback(image);
    };
    image.src = data;
  });
  return image;
}

function writeFile(buffer, fileName) {
  fs.writeFile(fileName, buffer, function (err) {
    if (err) { console.log('ERROR', err); }
  });
}

function dataURItoBuffer(dataURI, fileName) {
  var byteString = dataURI.split(',')[1];
  var buffer = new Buffer(byteString, 'base64');
  writeFile(buffer, fileName);
}

it('should work', function (done) {
  // fileName is defined somewhere else
  var filePath = './images' + fileName + '.png';
  // executeScript is needed to get the canvas element and convert it
  browser.driver.executeScript(function () {
    var can = document.querySelector('#workspace-canvas');
    var ctx = can.getContext("2d");
    var data = can.toDataURL("image/png");
    return data;
  }).then(function (result) {
    dataURItoBuffer(result, fileName);
    var oldImagePath = path.join(__dirname, './images/' + fileName + '.png');
    var newImagePath = path.join(__dirname, filePath);
    loadImage(oldImagePath, function (image) {
      image1 = image;
      loadImage(newImagePath, function (image) {
        image2 = image;
        expect(imagediff.equal(image1, image2, 50)).toBeTruthy(); // 50 pixel tolerance
        done();
      });
    });
  });
});
I'm doing a project with HTML and JavaScript that will run locally with local files.
I need to select a file via the input, get its information and then decide whether to add it to my list and play it or not. If I decide to use it, I'll put it in a queue to use later; otherwise I'll just discard it and select another file.
The problem I'm facing is that I can't find a way to get the video duration just by selecting it in the input.
I've searched a lot and didn't find any method to get the duration. In the code below I tried to use file.duration, but it didn't work; it just returns undefined.
This is my input, normal as you can see.
<div id="input-upload-file" class="box-shadow">
<span>upload! (ღ˘⌣˘ღ)</span> <!--ignore the text face lol -->
<input type="file" class="upload" id="fileUp" name="fileUpload" onchange="setFileInfo()">
</div>
And this is the function that I'm using to get all the information.
function setFileInfo() {
  showInfo(); // change div content
  var file = document.getElementById("fileUp").files[0];
  var pid = 1;
  var Pname = file.name;
  Pname = Pname.slice(0, Pname.indexOf(".")); // get filename without extension
  var Ptype = file.type;
  var Psize = bytesToSize(file.size); // turns into KB, MB, etc...
  var Pprior = setPriority(Ptype); // returns 1, 2 or 3
  var Pdur = file.duration;
  var Pmem = getMemory(Psize); // returns size * (100 || 10 || 1)
  var Pown = 'user';
  /* a lot of stuff throwing this info to the HTML */
  console.log(Pdur);
}
Is there a way to do this? If not, what alternatives could help me?
In modern browsers, you can use the URL API's URL.createObjectURL() with a non-appended video element to load the content of your file.
var myVideos = [];
window.URL = window.URL || window.webkitURL;
document.getElementById('fileUp').onchange = setFileInfo;

function setFileInfo() {
  var files = this.files;
  myVideos.push(files[0]);
  var video = document.createElement('video');
  video.preload = 'metadata';
  video.onloadedmetadata = function () {
    window.URL.revokeObjectURL(video.src);
    var duration = video.duration;
    myVideos[myVideos.length - 1].duration = duration;
    updateInfos();
  }
  video.src = URL.createObjectURL(files[0]);
}

function updateInfos() {
  var infos = document.getElementById('infos');
  infos.textContent = "";
  for (var i = 0; i < myVideos.length; i++) {
    infos.textContent += myVideos[i].name + " duration: " + myVideos[i].duration + '\n';
  }
}
<div id="input-upload-file" class="box-shadow">
  <span>upload! (ღ˘⌣˘ღ)</span>
  <input type="file" class="upload" id="fileUp" name="fileUpload">
</div>
<pre id="infos"></pre>
I needed to validate a single file before continuing to execute more code; here is my method, with the help of Kaiido's answer!
The onchange handler that runs when a user uploads a file:
$("input[type=file]").on("change", function(e) {
var file = this.files[0]; // Get uploaded file
validateFile(file) // Validate Duration
e.target.value = ''; // Clear value to allow new uploads
})
Now validate duration:
function validateFile(file) {
var video = document.createElement('video');
video.preload = 'metadata';
video.onloadedmetadata = function() {
window.URL.revokeObjectURL(video.src);
if (video.duration < 1) {
console.log("Invalid Video! video is less than 1 second");
return;
}
methodToCallIfValid();
}
video.src = URL.createObjectURL(file);
}
Here is an async/await Promise version:
const loadVideo = file => new Promise((resolve, reject) => {
  try {
    let video = document.createElement('video')
    video.preload = 'metadata'

    video.onloadedmetadata = function () {
      resolve(this)
    }

    video.onerror = function () {
      reject("Invalid video. Please select a video file.")
    }

    video.src = window.URL.createObjectURL(file)
  } catch (e) {
    reject(e)
  }
})
Can be used as follows:
const video = await loadVideo(e.currentTarget.files[0])
console.log(video.duration)
It's simple to get the video duration with FileReader, and it's easy to manage with async/await.
const getVideoDuration = file =>
  new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => {
      const media = new Audio(reader.result);
      media.onloadedmetadata = () => resolve(media.duration);
    };
    reader.readAsDataURL(file);
    reader.onerror = error => reject(error);
  });
const duration = await getVideoDuration(file);
where file is a File object
Live Example
const getVideoDuration = (file) =>
  new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => {
      const media = new Audio(reader.result);
      media.onloadedmetadata = () => resolve(media.duration);
    };
    reader.readAsDataURL(file);
    reader.onerror = (error) => reject(error);
  });

const handleChange = async (e) => {
  const duration = await getVideoDuration(e.target.files[0]);
  document.querySelector("#duration").innerText = `Duration: ${duration}`;
};
<div>
  <input type="file" onchange="handleChange(event)" />
  <p id="duration">Duration: </p>
</div>
This is how I managed to get the video duration before pushing it to S3. I am using this code to upload video files of 4+ GB.
Note: for formats like .avi, .flv, .vob, .mpeg etc., the duration will not be found, so handle that case with a simple message to the user (see the usage sketch after the code below).
// the same method can be used for images/audio too, with some small changes
getVideoDuration = async (f) => {
  const fileCallbackToPromise = (fileObj) => {
    return Promise.race([
      new Promise((resolve) => {
        if (fileObj instanceof HTMLImageElement) fileObj.onload = resolve;
        else fileObj.onloadedmetadata = resolve;
      }),
      new Promise((_, reject) => {
        setTimeout(reject, 1000);
      }),
    ]);
  };

  const objectUrl = URL.createObjectURL(f);
  // const isVideo = type.startsWith('video/');
  const video = document.createElement("video");
  video.src = objectUrl;
  await fileCallbackToPromise(video);
  return {
    duration: video.duration,
    width: video.videoWidth,
    height: video.videoHeight,
  };
}

// call the function
// (unrelated code removed for clarity)
const meta = await this.getVideoDuration(file);
// meta.width, meta.height and meta.duration are ready for you to use
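Since the Promise.race above rejects when the metadata does not load within a second, the call is best wrapped so unsupported formats can be reported to the user. A small usage sketch (assuming it runs inside the same async method as the call above):

let meta;
try {
  meta = await this.getVideoDuration(file);
} catch (err) {
  // the duration could not be read (e.g. .avi, .flv, .vob, .mpeg)
  alert('Could not read the duration of this video format.');
}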
I've implemented getting the video duration in the nano-metadata package: https://github.com/kalashnikovisme/nano-metadata.
You can do something like this:
import nanoMetadata from 'nano-metadata'

const change = (e) => {
  const file = e.target.files[0]

  nanoMetadata.video.duration(file).then((duration) => {
    console.log(duration) // will show you the video duration in seconds
  })
}
I'm using the FileReader API to read multiple files.
<html>
<body>
  <input type="file" id="filesx" name="filesx[]"
         onchange="readmultifiles(this.files)" multiple="" />
  <div id="bag"><ul></ul></div>
  <script>
    window.onload = function () {
      if (typeof window.FileReader !== 'function') {
        alert("The file API isn't supported on this browser yet.");
      }
    }

    function readmultifiles(files) {
      var ul = document.querySelector("#bag>ul");
      while (ul.hasChildNodes()) {
        ul.removeChild(ul.firstChild);
      }

      function setup_reader(file) {
        var name = file.name;
        var reader = new FileReader();
        reader.onload = function (e) {
          var bin = e.target.result; // get file content
          // do sth with text
          var li = document.createElement("li");
          li.innerHTML = name;
          ul.appendChild(li);
        }
        reader.readAsBinaryString(file);
      }

      for (var i = 0; i < files.length; i++) { setup_reader(files[i]); }
    }
  </script>
</body>
</html>
The problem is that all the files are read at the same time, and when their total size is very large, the browser crashes.
I want to read one file after another, so that the memory consumption is reduced.
Is this possible?
I came up with a solution myself which works.
function readmultifiles(files) {
  var reader = new FileReader();

  function readFile(index) {
    if (index >= files.length) return;
    var file = files[index];
    reader.onload = function (e) {
      // get file content
      var bin = e.target.result;
      // do sth with bin
      readFile(index + 1);
    }
    reader.readAsBinaryString(file);
  }

  readFile(0);
}
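For what it's worth, in modern browsers the File object itself exposes promise-based readers (file.arrayBuffer(), file.text()), so a plain for...of loop with await reads the files strictly one at a time without a FileReader. A minimal sketch:

async function readmultifiles(files) {
  for (const file of files) {
    // await ensures the next file is only read after this one has finished
    const bin = await file.arrayBuffer(); // or: await file.text()
    // do sth with bin
  }
}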
I'm updating this question for the benefit of new users who are looking for a solution to upload multiple files via the FileReader API, especially using ES6.
Rather than manually iterating over each file by index, it's much simpler and cleaner to use Object.keys(files):
<input type="file" onChange="readmultifiles" multiple/>
<script>
function readmultifiles(e) {
const files = e.currentTarget.files;
Object.keys(files).forEach(i => {
const file = files[i];
const reader = new FileReader();
reader.onload = (e) => {
//server call for uploading or reading the files one-by-one
//by using 'reader.result' or 'file'
}
reader.readAsBinaryString(file);
})
};
</script>
This should read the files one by one:
function readmultifiles(files) {
  var ul = document.querySelector("#bag>ul");
  while (ul.hasChildNodes()) {
    ul.removeChild(ul.firstChild);
  }

  // Read first file
  setup_reader(files, 0);
}

// Don't define functions in functions in functions, when possible.
function setup_reader(files, i) {
  var file = files[i];
  var name = file.name;
  var reader = new FileReader();
  reader.onload = function (e) {
    readerLoaded(e, files, i, name);
  };
  reader.readAsBinaryString(file);
  // After reading, the next file is read (see readerLoaded).
}

function readerLoaded(e, files, i, name) {
  // get file content
  var bin = e.target.result;
  // do sth with text
  var li = document.createElement("li");
  li.innerHTML = name;
  document.querySelector("#bag>ul").appendChild(li);

  // If there's a file left to load
  if (i < files.length - 1) {
    // Load the next file
    setup_reader(files, i + 1);
  }
}
Define the input using the multiple attribute:
<input onchange="upload(event)" type="file" multiple />
Define the upload function:
const upload = async (event) => {
  // Convert the FileList into an array and iterate
  let files = Array.from(event.target.files).map(file => {
    // Define a new file reader
    let reader = new FileReader();

    // Create a new promise
    return new Promise(resolve => {
      // Resolve the promise after reading file
      reader.onload = () => resolve(reader.result);

      // Read the file as a text
      reader.readAsText(file);
    });
  });

  // At this point you'll have an array of results
  let res = await Promise.all(files);
}
My complete solution is here:
<html>
<body>
  <input type="file" id="filesx" name="filesx[]"
         onchange="readmultifiles(this.files)" multiple="" />
  <div id="bag"></div>
  <script>
    window.onload = function () {
      if (typeof window.FileReader !== 'function') {
        alert("The file API isn't supported on this browser yet.");
      }
    }

    function readmultifiles(files) {
      var reader = new FileReader();

      function readFile(index) {
        if (index >= files.length) return;
        var file = files[index];
        reader.onload = function (e) {
          // get file content
          var bin = e.target.result;
          // do sth with bin
          readFile(index + 1);
        }
        reader.readAsBinaryString(file);
      }

      readFile(0);

      function setup_reader(file) {
        var name = file.name;
        var reader = new FileReader();
        var ul = document.createElement("ul");
        document.getElementById('bag').appendChild(ul);
        reader.onload = function (e) {
          var bin = e.target.result; // get file content
          // do sth with text
          var li = document.createElement("li");
          li.innerHTML = name;
          ul.appendChild(li);
        }
        reader.readAsBinaryString(file);
      }

      for (var i = 0; i < files.length; i++) { setup_reader(files[i]); }
    }
  </script>
</body>
</html>
I implemented another solution using modern JS (Map, Iterator). I adapted the code from my Angular application (originally written with some TS features).
Like Steve KACOU mentioned, we create a different FileReader instance for each file.
<input type="file" id="filesx" name="filesx[]"
onchange="processFileChange(this)" multiple=""/>
function processFileChange(event) {
if (event.target.files && event.target.files.length) {
const fileMap = new Map();
for (let i = 0; i < event.target.files.length; i++) {
const file = event.target.files[i];
const fileReader = new FileReader();
fileMap.set(fileReader, file);
}
const mapEntries = fileMap.entries();
readFile(mapEntries);
}
}
function readFile(mapEntries) {
const nextValue = mapEntries.next();
if (nextValue.done === true) {
return;
}
const [fileReader, file] = nextValue.value;
fileReader.readAsDataURL(file);
fileReader.onload = () => {
// Do black magic for each file here (using fileReader.result)
// Read the next file
readFile(mapEntries);
};
}
Basically this takes advantage of passing objects by reference to perpetuate the map with every iteration. This makes the code quite easy to read in my opinion.
Taking the best parts of these answers.
<input type="file" onchange="readmultifiles(this.files)" multiple />
<script>
function readmultifiles(files) {
for (file of files) {
const reader = new FileReader();
reader.readAsBinaryString(file);
reader.fileName = file.name;
reader.onload = (event) => {
const fileName = event.target.fileName;
const content = event.currentTarget.result;
console.log({ fileName, content });
};
}
}
</script>
You must instantiate a FileReader for each file to read.
function readFiles(event) {
  // Get the files
  var files = event.target.files || [];

  if (files.length) {
    for (let index = 0; index < files.length; index++) {
      // instantiate a FileReader for the current file to read
      const reader = new FileReader();
      reader.onload = function () {
        var result = reader.result;
        console.log(result); // File data
      };
      reader.readAsDataURL(files[index]);
    }
  }
}
Try this
const setFileMultiple = (e) => {
  e.preventDefault();

  // Get the files
  let file = [...e.target.files] || [];

  file.forEach((item, index) => {
    let reader = new FileReader();
    reader.onloadend = () => {
      console.log("result", reader.result);
    };
    reader.readAsDataURL(file[index]);
  });
};