Converting base64 to Blob and Blob to base64 using FileReader in PhantomJS - javascript

I have an Angular controller that works with images. It has a watcher for the file property on the scope. When the property contains an array of files, the first file should be read by FileReader, converted to a base64 string, and added to the page. Something like this:
$scope.$watch('file', function (files) {
    if (files && files.length > 0) {
        if (files[0].type && files[0].type.match('image.*')) {
            var reader = new FileReader();
            reader.onload = function (e) {
                render(e.target.result);
            };
            reader.readAsDataURL(files[0]);
        }
    }
});
And the render function:
function render (src) {
    var image = new Image();
    image.addEventListener('load', function () {
        if (image.width < MIN_SIZE || image.height < MIN_SIZE) {
            $scope.error = $filter('localize')('UploaderWindow_ImageSizeError');
            $scope.$apply();
        } else {
            new global.ICropper('original-image', {
                gap: 0,
                keepSquare: true,
                image: src,
                preview: ['cropped-image']
            });
        }
    });
    image.addEventListener('error', function () {
        $scope.error = $filter('localize')('UploaderWindow_SelectImage');
        $scope.$apply();
    });
    image.src = src;
}
ICropper should create an img element in the DOM with the base64 string in its src attribute.
The problem shows up in my unit test for this functionality. Test case:
it('Should render new image from file input', function () {
    var imageData = image.split(',')[1],
        imageType = image.split(',')[0].replace('data:', '').replace(';base64', ''),
        file = base64toBlob(imageData, imageType);
    expect(originalImage.lastElementChild).toBe(null);
    runs(function () {
        $scope.file = [file];
        $scope.$apply();
    });
    waitsFor(function () {
        return originalImage.lastElementChild;
    }, 'waiting_original_form');
    runs(function () {
        expect(originalImage.lastElementChild.src).toBe(image);
    });
});
The image variable contains a valid base64 data URL, and originalImage.lastElementChild is the img element that should be created by ICropper. The body of the base64toBlob function:
function base64toBlob (b64Data, contentType) {
    var binary = atob(b64Data.replace(/\s/g, '')),
        binaryLength = binary.length,
        buffer = new ArrayBuffer(binaryLength),
        view = new Uint8Array(buffer);
    for (var i = 0; i < binaryLength; i++) {
        view[i] = binary.charCodeAt(i);
    }
    return new Blob([view], {type: contentType});
}
This test passes in Chrome, but not in PhantomJS:
timeout: timed out after 5000 msec waiting for waiting_original_form
I think it's because the image's load event never fires; error fires instead. But I don't understand why. I know that Blob is not defined in PhantomJS, so I use this polyfill: https://github.com/eligrey/Blob.js
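One way to narrow this down (a diagnostic sketch, not a fix) is to round-trip the Blob back through FileReader inside PhantomJS and compare the result with the original data URL; the done parameter here is a hypothetical hook for whatever async style the test uses:
// Diagnostic sketch: check whether the (possibly polyfilled) Blob survives a
// readAsDataURL round-trip under PhantomJS. `image` is the original data URL
// and base64toBlob is the helper shown above.
function checkBlobRoundTrip(image, done) {
    var imageData = image.split(',')[1],
        imageType = image.split(',')[0].replace('data:', '').replace(';base64', ''),
        blob = base64toBlob(imageData, imageType),
        reader = new FileReader();
    reader.onload = function (e) {
        // false here would point at the polyfilled Blob (or readAsDataURL),
        // which would explain why the Image fires `error` instead of `load`
        console.log('round-trip ok:', e.target.result === image);
        done();
    };
    reader.onerror = function () {
        console.log('FileReader could not read the polyfilled Blob');
        done();
    };
    reader.readAsDataURL(blob);
}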

Related

Only allow one image at the same time with File API?
What's the correct way to restrict the input to only one image at a time?
<input id="browse" type="file" multiple>
<div id="imgs"></div>
<style type="text/css">
#imgs {
height: imageheight;
width: imagewidth;
position: absolute;
top: 39px;
left: 9px;
}
</style>
<script type="text/javascript">
window.URL = window.URL || window.webkitURL;
var elBrowse = document.getElementById("browse"),
elPic = document.getElementById("imgs"),
useBlob = false && window.URL;
function readImage(file) {
var reader = new FileReader();
reader.addEventListener("load", function() {
var image = new Image();
image.addEventListener("load", function() {
elPic.appendChild(this);
});
image.src = useBlob ? window.URL.createObjectURL(file) : reader.result;
});
reader.readAsDataURL(file);
}
elBrowse.addEventListener("change", function() {
var files = this.files; {
for (var i = 0; i < files.length; i++) {
var file = files[i];
if ((/\.(png|jpeg|jpg|gif)$/i).test(file.name)) {
readImage(file);
}
}
}
});
</script>
One solution could be a promise chain like this:
function readImage(file) {
    //wrap readImage body into a promise
    return new Promise((resolve) => {
        var reader = new FileReader();
        reader.addEventListener("load", function () {
            var image = new Image();
            image.addEventListener("load", function () {
                elPic.appendChild(this);
                //resolve the promise after the child is appended so the caller would know when to start the next one
                resolve();
            });
            image.src = useBlob ? window.URL.createObjectURL(file) : reader.result;
        });
        reader.readAsDataURL(file);
    });
}
elBrowse.addEventListener("change", function () {
var files = this.files; {
//start the chain
var chain = Promise.resolve();
for (var i = 0; i < files.length; i++) {
//use let to have properly scoped variable
let file = files[i];
if ((/\.(png|jpeg|jpg|gif)$/i).test(file.name)) {
//append functions to call to the chain
chain = chain.then(() => readImage(file));
}
}
}
});
The call to the next readImage() happens after resolve() is called in the previous one, that is, after the image has been loaded and appended.
You might also want to consider the loadend event: it is emitted even when the read fails, so a failed file won't break your chain (see the sketch after the MDN link below).
https://developer.mozilla.org/en-US/docs/Web/Events/loadend
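For example, a minimal sketch of how loadend could slot into the promise wrapper above, resolving on both success and failure so the chain keeps moving (elPic and useBlob come from the earlier snippet):
function readImage(file) {
    return new Promise((resolve) => {
        var reader = new FileReader();
        reader.addEventListener("load", function () {
            var image = new Image();
            image.addEventListener("load", function () {
                elPic.appendChild(this);
                resolve();
            });
            // if the image itself fails to decode, still move on to the next file
            image.addEventListener("error", resolve);
            image.src = useBlob ? window.URL.createObjectURL(file) : reader.result;
        });
        // loadend fires after both successful and failed reads;
        // resolve here only when the read actually failed
        reader.addEventListener("loadend", function () {
            if (reader.error) {
                resolve();
            }
        });
        reader.readAsDataURL(file);
    });
}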
I started a new approach that leads to the desired result.
<body style="margin:8px">
<img style="position:absolute; top:39px; left:9px"
height="imageheight" width="imagewidth">
<input type="file" onchange="previewFile()">
<script type="text/javascript">
function previewFile() {
var preview = document.querySelector('img');
var file = document.querySelector('input[type=file]').files[0];
var reader = new FileReader();
reader.addEventListener("load", function () {
preview.src = reader.result;
}, false);
if ((/\.(png|jpeg|jpg|gif)$/i).test(file.name)) {
reader.readAsDataURL(file); }
}
</script>

How can I make return work in the load function of a Vue component?

I have a Vue component like this:
<template>
    <section>
        ...
    </section>
</template>
<script>
export default {
    ...
    data() {
        return {
            allowableTypes: ['jpg', 'jpeg', 'png'],
            maximumSize: 4000000
        }
    },
    methods: {
        ...
        onFileChange(e) {
            if (this.validate(e.target.files[0])) {
                let files = e.target.files,
                    reader = new FileReader()
                // if any values
                if (files.length) {
                    reader.onload = (e) => {
                        this.image = e.target.result
                    }
                    reader.readAsDataURL(files[0])
                }
            }
        },
        validate(image) {
            // validation file type
            if (!this.allowableTypes.includes(image.name.split(".").pop().toLowerCase())) {
                return false
            }
            // validation file size
            if (image.size > this.maximumSize) {
                return false
            }
            // validation image resolution
            let img = new Image()
            img.src = window.URL.createObjectURL(image)
            let self = this
            img.onload = function() {
                let width = img.naturalWidth,
                    height = img.naturalHeight
                window.URL.revokeObjectURL(img.src)
                if (width != 850 && height != 350) {
                    return false
                }
            }
            return true
        }
    }
}
</script>
When the user uploads an image, the onFileChange method is called. Before displaying the image, it calls the validate method to validate the image.
Validating the file type and file size works. The problem is validating the resolution.
From my code the logic looks correct, but when I try it, for example by uploading an image with width = 100 and height = 100, the code should return false, yet when I run it, it returns true.
It seems the return inside img.onload is not working.
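To illustrate the timing (a reduced sketch of the validate method above, not the full component): validate() returns synchronously, while img.onload runs later, so its return false never reaches the caller.
// Reduced sketch of the timing problem
function validate(image) {
    let img = new Image()
    img.src = window.URL.createObjectURL(image)
    img.onload = function () {
        // runs asynchronously, after validate() has already returned
        return false   // returned to nobody
    }
    return true   // this is what onFileChange actually sees
}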
How can I solve this problem?
A nice way to handle asynchronous validation is by using Promises:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
If you are targeting Internet Explorer, make sure to use a polyfill such as this one:
https://github.com/stefanpenner/es6-promise
Your code would then look like this:
onFileChange(e) {
    let self = this
    this.validate(e.target.files[0])
        .then(function() {
            let files = e.target.files,
                reader = new FileReader()
            // if any values
            if (files.length) {
                reader.onload = (e) => {
                    self.image = e.target.result
                }
                reader.readAsDataURL(files[0])
            }
        })
        .catch(function() {
            // do something in the case where the image is not valid
        })
},
validate(image) {
    let self = this
    return new Promise(function(resolve, reject) {
        // validation file type
        if (!self.allowableTypes.includes(image.name.split(".").pop().toLowerCase())) {
            reject()
        }
        // validation file size
        if (image.size > self.maximumSize) {
            reject()
        }
        // validation image resolution
        let img = new Image()
        img.src = window.URL.createObjectURL(image)
        img.onload = function() {
            let width = img.naturalWidth,
                height = img.naturalHeight
            window.URL.revokeObjectURL(img.src)
            if (width != 850 && height != 350) {
                reject()
            } else {
                resolve()
            }
        }
    })
}
If you do not want to or cannot use Promises, you could use a callback to achieve the same behaviour:
onFileChange(e) {
    let self = this
    let validCallback = function() {
        let files = e.target.files,
            reader = new FileReader()
        // if any values
        if (files.length) {
            reader.onload = (e) => {
                self.image = e.target.result
            }
            reader.readAsDataURL(files[0])
        }
    }
    let unvalidCallback = function() {
        // do something in the case where the image is not valid
    }
    this.validate(e.target.files[0], validCallback, unvalidCallback)
},
validate(image, validCallback, unvalidCallback) {
    // validation file type
    if (!this.allowableTypes.includes(image.name.split(".").pop().toLowerCase())) {
        unvalidCallback()
        return
    }
    // validation file size
    if (image.size > this.maximumSize) {
        unvalidCallback()
        return
    }
    // validation image resolution
    let img = new Image()
    img.src = window.URL.createObjectURL(image)
    let self = this
    img.onload = function() {
        let width = img.naturalWidth,
            height = img.naturalHeight
        window.URL.revokeObjectURL(img.src)
        if (width != 850 && height != 350) {
            unvalidCallback()
            return
        } else {
            validCallback()
        }
    }
}
It's onloadend, not onload.
Change your code to this:
let self = this;
var reader = new FileReader();
reader.onloadend = () => {
    // your logic here (use self, not this)
}
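For completeness (a sketch, assuming files comes from the change event as in the question's onFileChange): onloadend only fires once a read has been started, so readAsDataURL is still required.
let self = this;
var reader = new FileReader();
reader.onloadend = () => {
    self.image = reader.result;   // the data URL is available here
};
// without this call neither onload nor onloadend will ever fire
reader.readAsDataURL(files[0]);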

Sending base64 image to Firebase Storage phonegap

I am having a problem sending my base64 image from PhoneGap (iOS) to Firebase Storage. The main problem is that Firebase Storage only accepts a Blob or File as an attachment.
Here's my code for the camera function (cordova-plugin-camera):
function GetCamera() {
    navigator.camera.getPicture(cameraSuccess, cameraError, {
        quality: 50,
        destinationType: Camera.DestinationType.DATA_URL,
        encodingType: Camera.EncodingType.JPEG,
        saveToPhotoAlbum: true
    });
}
Function to convert base64 to a Blob:
function b64toblob(b64_data, content_type) {
    content_type = content_type || '';
    var slice_size = 512;
    var byte_characters = atob(b64_data);
    var byte_arrays = [];
    for (var offset = 0; offset < byte_characters.length; offset += slice_size) {
        var slice = byte_characters.slice(offset, offset + slice_size);
        var byte_numbers = new Array(slice.length);
        for (var i = 0; i < slice.length; i++) {
            byte_numbers[i] = slice.charCodeAt(i);
        }
        var byte_array = new Uint8Array(byte_numbers);
        byte_arrays.push(byte_array);
    }
    var blob = new Blob(byte_arrays, {type: content_type});
    return blob;
}
Camera success function. Note that imageblob is a global variable:
function cameraSuccess(imageData) {
    document.getElementById('Attachment1').innerHTML = "Attachment: True";
    var image = imageData;
    imageblob = b64toblob(image, "image/jpeg");
}
Putting the Blob into Firebase Storage:
try {
    var storageRef = storage.ref().child('fire');
    var uploadTask = storageRef.put(imageblob);
    uploadTask.on('state_changed', null, null, function() {
        var downloadURL = uploadTask.snapshot.downloadURL;
        console.log("downloadURL :" + downloadURL);
    });
I have tried everything, but it's not working. I really need your help; I am out of ideas.
The Cordova camera plugin doesn't return a File object; that is a limitation of the plugin. But it does return all the details about the image, and from those you can create a Blob or File object yourself.
Reference for creating a Blob from a file URL:
var getFileBlob = function (url, cb) {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", url);
    xhr.responseType = "blob";
    xhr.addEventListener('load', function() {
        cb(xhr.response);
    });
    xhr.send();
};
var blobToFile = function (blob, name) {
    blob.lastModifiedDate = new Date();
    blob.name = name;
    return blob;
};
var getFileObject = function(filePathOrUrl, cb) {
    getFileBlob(filePathOrUrl, function (blob) {
        cb(blobToFile(blob, 'test.jpg')); // Second argument is name of the image
    });
};
Calling the function to get the file Blob:
getFileObject('img/test.jpg', function (fileObject) { // First argument is path of the file
    console.log(fileObject);
});
In your camera success function, try this:
function cameraSuccess(imageData) {
    document.getElementById('Attachment1').innerHTML = "Attachment: True";
    getFileObject(imageData.nativeURL, function(fileObject) {
        console.log(fileObject);
        var imgName = fileObject.name;
        var metadata = { contentType: fileObject.type };
        var uploadFile = storageRef.child("images/" + imgName).put(fileObject, metadata);
        uploadFile.on(firebase.storage.TaskEvent.STATE_CHANGED, function(snapshot) {
            var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
            console.log(progress);
        }, function(error) {
            console.log(error);
        }, function() {
            var imgFirebaseURL = uploadFile.snapshot.downloadURL;
            console.log(imgFirebaseURL);
        });
    });
}

FileReader's onloadend event is never triggered

I'm trying to make a small snippet to preview images before uploading them:
$.fn.previewImg = function($on) {
    var input = this;
    try {
        if (this.is("input[type='file']")) {
            input.change(function() {
                var reader = new FileReader();
                reader.onloadend = function() {
                    for (var i = 0; i < $on.length; i++) {
                        if (/img/i.test($on[i].tagName)) $on[i].src = reader.result;
                        else $on[i].style.bakgroundImage = "url(" + reader.result + ")";
                    }
                };
            });
        } else throw new exception("Trying to preview image from an element that is not a file input!");
    } catch (x) {
        console.log(x);
    }
};
I'm calling it like:
$("#file").previewImg($(".preview_img"));
but the onloadend function is never called.
Actually, you have to specify the file and instruct the FileReader to read it.
Below is the corrected code.
$.fn.previewImg = function($on) {
    var input = this;
    try {
        if (this.is("input[type='file']")) {
            input.change(function(evt) {
                var reader = new FileReader();
                console.log("Input changed");
                reader.onloadend = function() {
                    console.log("onloadend triggered");
                    for (var i = 0; i < $on.length; i++) {
                        if (/img/i.test($on[i].tagName)) $on[i].src = reader.result;
                        else $on[i].style.backgroundImage = "url(" + reader.result + ")";
                    }
                };
                //get the selected file
                var files = evt.target.files;
                //instruct reader to read it
                reader.readAsDataURL(files[0]);
            });
        } else throw new exception("Trying to preview image from an element that is not a file input!");
    } catch (x) {
        console.log(x);
    }
};
$("#file").previewImg($(".preview_img"));

Reading multiple files with Javascript FileReader API one at a time

I'm using the FileReader API to read multiple files.
<html>
<body>
    <input type="file" id="filesx" name="filesx[]"
           onchange="readmultifiles(this.files)" multiple=""/>
    <div id="bag"><ul/></div>
    <script>
        window.onload = function() {
            if (typeof window.FileReader !== 'function') {
                alert("The file API isn't supported on this browser yet.");
            }
        }
        function readmultifiles(files) {
            var ul = document.querySelector("#bag>ul");
            while (ul.hasChildNodes()) {
                ul.removeChild(ul.firstChild);
            }
            function setup_reader(file) {
                var name = file.name;
                var reader = new FileReader();
                reader.onload = function(e) {
                    var bin = e.target.result; //get file content
                    // do sth with text
                    var li = document.createElement("li");
                    li.innerHTML = name;
                    ul.appendChild(li);
                }
                reader.readAsBinaryString(file);
            }
            for (var i = 0; i < files.length; i++) { setup_reader(files[i]); }
        }
    </script>
</body>
</html>
The problem is that all files are read at the same time, and when the files have a total size (sum) that is very large, the browser crashes.
I want to read one file after another, so that the memory consumption is reduced.
Is this possible?
I came up with a solution myself which works.
function readmultifiles(files) {
    var reader = new FileReader();
    function readFile(index) {
        if (index >= files.length) return;
        var file = files[index];
        reader.onload = function(e) {
            // get file content
            var bin = e.target.result;
            // do sth with bin
            readFile(index + 1)
        }
        reader.readAsBinaryString(file);
    }
    readFile(0);
}
I'm updating this question for the benefit of new users who are looking for a way to upload multiple files via the FileReader API, especially using modern JavaScript (ES6).
Rather than manually iterating over each file with an index, it's much simpler and cleaner to use Object.keys(files):
<input type="file" onChange="readmultifiles" multiple/>
<script>
function readmultifiles(e) {
const files = e.currentTarget.files;
Object.keys(files).forEach(i => {
const file = files[i];
const reader = new FileReader();
reader.onload = (e) => {
//server call for uploading or reading the files one-by-one
//by using 'reader.result' or 'file'
}
reader.readAsBinaryString(file);
})
};
</script>
This should read the files one by one:
function readmultifiles(files) {
    var ul = document.querySelector("#bag>ul");
    while (ul.hasChildNodes()) {
        ul.removeChild(ul.firstChild);
    }
    // Read first file
    setup_reader(files, 0);
}
// Don't define functions in functions in functions, when possible.
function setup_reader(files, i) {
    var file = files[i];
    var name = file.name;
    var reader = new FileReader();
    reader.onload = function(e) {
        readerLoaded(e, files, i, name);
    };
    reader.readAsBinaryString(file);
    // After reading, read the next file.
}
function readerLoaded(e, files, i, name) {
    // get file content
    var bin = e.target.result;
    // do sth with text
    var ul = document.querySelector("#bag>ul"); // look the list up again, it is not in scope here
    var li = document.createElement("li");
    li.innerHTML = name;
    ul.appendChild(li);
    // If there's a file left to load
    if (i < files.length - 1) {
        // Load the next file
        setup_reader(files, i + 1);
    }
}
Define the input using the multiple attribute:
<input onchange = 'upload(event)' type = 'file' multiple/>
Define the upload function:
const upload = async (event) => {
    // Convert the FileList into an array and iterate
    let files = Array.from(event.target.files).map(file => {
        // Define a new file reader
        let reader = new FileReader();
        // Create a new promise
        return new Promise(resolve => {
            // Resolve the promise after reading file
            reader.onload = () => resolve(reader.result);
            // Read the file as a text
            reader.readAsText(file);
        });
    });
    // At this point you'll have an array of results
    let res = await Promise.all(files);
}
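Note that Promise.all only waits for the reads; they are all started up front and run in parallel. If the files really must be read one at a time (the original memory concern), an await inside a loop keeps them sequential. A sketch, assuming the same input element and a hypothetical uploadSequential handler:
// Sequential variant (a sketch, not part of the answer above):
// each file is fully read before the next read begins.
const uploadSequential = async (event) => {
    for (const file of event.target.files) {
        const text = await new Promise(resolve => {
            const reader = new FileReader();
            reader.onload = () => resolve(reader.result);
            reader.readAsText(file);
        });
        console.log(`${file.name}: ${text.length} characters`);
    }
};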
My complete solution is here:
<html>
<body>
    <input type="file" id="filesx" name="filesx[]"
           onchange="readmultifiles(this.files)" multiple=""/>
    <div id="bag"></div>
    <script>
        window.onload = function() {
            if (typeof window.FileReader !== 'function') {
                alert("The file API isn't supported on this browser yet.");
            }
        }
        function readmultifiles(files) {
            var reader = new FileReader();
            function readFile(index) {
                if (index >= files.length) return;
                var file = files[index];
                reader.onload = function(e) {
                    // get file content
                    var bin = e.target.result;
                    // do sth with bin
                    readFile(index + 1)
                }
                reader.readAsBinaryString(file);
            }
            readFile(0);
            function setup_reader(file) {
                var name = file.name;
                var reader = new FileReader();
                var ul = document.createElement("ul");
                document.getElementById('bag').appendChild(ul);
                reader.onload = function(e) {
                    var bin = e.target.result; //get file content
                    // do sth with text
                    var li = document.createElement("li");
                    li.innerHTML = name;
                    ul.appendChild(li);
                }
                reader.readAsBinaryString(file);
            }
            for (var i = 0; i < files.length; i++) { setup_reader(files[i]); }
        }
    </script>
</body>
</html>
I implemented another solution using modern JS (Map, Iterator). I adapted the code from my Angular application (originally written with some TS features).
As Steve KACOU mentioned, we create a different FileReader instance for each file.
<input type="file" id="filesx" name="filesx[]"
onchange="processFileChange(this)" multiple=""/>
function processFileChange(event) {
if (event.target.files && event.target.files.length) {
const fileMap = new Map();
for (let i = 0; i < event.target.files.length; i++) {
const file = event.target.files[i];
const fileReader = new FileReader();
fileMap.set(fileReader, file);
}
const mapEntries = fileMap.entries();
readFile(mapEntries);
}
}
function readFile(mapEntries) {
const nextValue = mapEntries.next();
if (nextValue.done === true) {
return;
}
const [fileReader, file] = nextValue.value;
fileReader.readAsDataURL(file);
fileReader.onload = () => {
// Do black magic for each file here (using fileReader.result)
// Read the next file
readFile(mapEntries);
};
}
Basically this takes advantage of passing objects by reference to perpetuate the map with every iteration. This makes the code quite easy to read in my opinion.
Taking the best parts of these answers.
<input type="file" onchange="readmultifiles(this.files)" multiple />
<script>
function readmultifiles(files) {
for (file of files) {
const reader = new FileReader();
reader.readAsBinaryString(file);
reader.fileName = file.name;
reader.onload = (event) => {
const fileName = event.target.fileName;
const content = event.currentTarget.result;
console.log({ fileName, content });
};
}
}
</script>
You must instantiate a FileReader for each file to read.
function readFiles(event) {
    //Get the files
    var files = event.target.files || [];
    if (files.length) {
        for (let index = 0; index < files.length; index++) {
            //instantiate a FileReader for the current file to read
            let reader = new FileReader();
            reader.onload = function() {
                var result = reader.result;
                console.log(result); //File data
            };
            reader.readAsDataURL(files[index]);
        }
    }
}
Try this:
const setFileMultiple = (e) => {
    e.preventDefault();
    //Get the files
    let file = [...e.target.files] || [];
    file.forEach((item, index) => {
        let reader = new FileReader();
        reader.onloadend = () => {
            console.log("result", reader.result);
        };
        reader.readAsDataURL(file[index]);
    });
};
