JavaScript readAsBinaryString Function on IE11 - javascript

On this page http://www.html5rocks.com/en/tutorials/file/dndfiles/ if you scroll down to "Example: Slicing a file. Try it!" you will see a use of the readAsBinaryString API to read the bytes of local files.
I've found that IE (in my case IE11) doesn't support readAsBinaryString.
Even the code mentioned in the post "HTML5 File API read as text and binary" breaks at readAsBinaryString in IE11.
I have seen some posts on Stack Overflow suggesting the use of readAsArrayBuffer(), but that is also not working for me; it returns undefined.
My question is: what are my options if I have to run this on IE11? Is it possible to write another IE-compatible JS function that does the job of readAsBinaryString()?

I combined @Jack's answer with my comment to show a complete working example.
In the <head> section I added this script to add a FileReader.readAsBinaryString function in IE11:
if (FileReader.prototype.readAsBinaryString === undefined) {
    FileReader.prototype.readAsBinaryString = function (fileData) {
        var binary = "";
        var pt = this;
        var reader = new FileReader();
        reader.onload = function (e) {
            var bytes = new Uint8Array(reader.result);
            var length = bytes.byteLength;
            for (var i = 0; i < length; i++) {
                binary += String.fromCharCode(bytes[i]);
            }
            // pt.result is read-only, so assign the content to another property
            pt.content = binary;
            pt.onload(); // thanks to @Denis' comment
        };
        reader.readAsArrayBuffer(fileData);
    };
}
Then I needed to slightly modify my original script code, because e.target.result has no value when this fallback function is used.
var reader = new FileReader();
reader.onload = function (e) {
    // ADDED CODE
    var data;
    if (!e) {
        data = reader.content;
    } else {
        data = e.target.result;
    }
    // business code
};
reader.readAsBinaryString(myFile);

This is my solution.
var reader = new FileReader();
reader.readAsBinaryString(fileData);
reader.onload = function (e) {
    if (reader.result) reader.content = reader.result;
    var base64Data = btoa(reader.content);
    //...
};

// extend FileReader
if (!FileReader.prototype.readAsBinaryString) {
    FileReader.prototype.readAsBinaryString = function (fileData) {
        var binary = "";
        var pt = this;
        var reader = new FileReader();
        reader.onload = function (e) {
            var bytes = new Uint8Array(reader.result);
            var length = bytes.byteLength;
            for (var i = 0; i < length; i++) {
                binary += String.fromCharCode(bytes[i]);
            }
            // pt.result is read-only, so assign the binary string to another property
            pt.content = binary;
            $(pt).trigger('onload'); // requires jQuery
        };
        reader.readAsArrayBuffer(fileData);
    };
}

FileReader.readAsBinaryString is a non-standard function and has been deprecated.
FileReader.readAsArrayBuffer should be used instead (see MDN).
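If all you need is the raw bytes, a minimal sketch of the readAsArrayBuffer replacement could look like this (myFile stands in for whatever File or Blob you are reading):
var reader = new FileReader();
reader.onload = function () {
    // reader.result is an ArrayBuffer here; wrap it to get at the bytes
    var bytes = new Uint8Array(reader.result);
    console.log("first byte:", bytes[0]);
};
reader.readAsArrayBuffer(myFile); // myFile is a File/Blob, e.g. from an <input type="file">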

For IE 11 you can use this XHR trick:
function blobToBinaryStringIE11(blob) {
    var blobURL = URL.createObjectURL(blob);
    var xhr = new XMLHttpRequest();
    xhr.open("get", blobURL);
    xhr.overrideMimeType("text/plain; charset=x-user-defined");
    xhr.onload = function () {
        var binary = xhr.response;
        // do stuff
    };
    xhr.send();
}
It's 20x faster than the Uint8Array + fromCharCode route and as fast as readAsBinaryString.
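The function above only uses the binary string inside onload; if the caller needs it, one possible variation (the done callback and the URL.revokeObjectURL call are my additions, not part of the original answer) is:
function blobToBinaryStringIE11(blob, done) {
    var blobURL = URL.createObjectURL(blob);
    var xhr = new XMLHttpRequest();
    xhr.open("get", blobURL);
    // x-user-defined keeps each byte intact in the returned string
    xhr.overrideMimeType("text/plain; charset=x-user-defined");
    xhr.onload = function () {
        URL.revokeObjectURL(blobURL); // free the object URL once the read is finished
        done(xhr.response);           // binary string, like readAsBinaryString would give
    };
    xhr.send();
}

// Usage:
blobToBinaryStringIE11(myFile, function (binary) {
    console.log("read " + binary.length + " bytes");
});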

Replace
reader.readAsBinaryString(blob);
with:
reader.readAsText(blob);
It works well cross-browser. (Note that readAsText decodes the file as text, so this is only a drop-in replacement when the file content actually is text.)

I had some problems with the answers here and ended up making a few slight changes.
Instead of assigning to pt.content, my solution is to pass a custom object to the prototype's onload, which the receiver can specifically look for. I named this property msieContent so it is very specific.
I also used another accepted answer for converting the Uint8Array to a string in a more robust way; you can see the full details here:
https://stackoverflow.com/a/12713326/213050
Polyfill
if (FileReader.prototype.readAsBinaryString === undefined) {
    // https://stackoverflow.com/a/12713326/213050
    function Uint8ToString(u8a: Uint8Array) {
        const CHUNK_SZ = 0x8000;
        let c = [];
        for (let i = 0; i < u8a.length; i += CHUNK_SZ) {
            c.push(String.fromCharCode.apply(null, u8a.subarray(i, i + CHUNK_SZ)));
        }
        return c.join('');
    }
    FileReader.prototype.readAsBinaryString = function (fileData) {
        const reader = new FileReader();
        reader.onload = () => this.onload({
            msieContent: Uint8ToString(new Uint8Array(<any>reader.result))
        });
        reader.readAsArrayBuffer(fileData);
    };
}
Usage
private _handleTextFile(file: File) {
    const reader = new FileReader();
    reader.onload = (e) => {
        // support for msie, see polyfills.ts
        const readResult: string = (<any>e).msieContent || <string>e.target.result;
    };
    reader.readAsBinaryString(file);
}

Related

How do I read the value of a variable defined in FileReader.onloadend inside the XMLHttpRequest.onreadystatechange callback function?

In the code below, I need to read the value of the variable ext (defined inside reader.onloadend) in the function xhr.onreadystatechange. Right now it returns undefined. Any ideas how to fix that?
function file_upload(file_input) {
    var name = file_input.dataset.target;
    var url = file_input.dataset.url;
    var path = file_input.dataset.path;
    var ext;
    for (var i = 0; i < file_input.files.length; i++) {
        var file = file_input.files[i];
        var xhr = new XMLHttpRequest();
        xhr.open("POST", url, true);
        xhr.onreadystatechange = function() {
            if (xhr.readyState == 4 && xhr.status == 200) {
                var id = xhr.responseText;
                var input = document.createElement("input");
                input.setAttribute("type", "hidden");
                input.setAttribute("name", name);
                input.setAttribute("value", id);
                document.getElementById("btn_uload_" + name + "_" + ext).append(input);
                document.getElementById("empty_file").style.display = 'none';
                document.getElementById("single_file_" + ext).style.display = 'block';
            }
        };
        var reader = new FileReader();
        reader.onloadend = function() {
            var bytes = reader.result;
            var ext = file.name.split(".").pop();
            var formData = new FormData();
            formData.append('bytes', bytes);
            formData.append('type', ext);
            xhr.send(formData);
        };
        reader.readAsDataURL(file);
    }
}
Remove the var ext = ... from the reader callback; that only creates a local variable for that function (it isn't setting the outer one). Even if you remove the var keyword so that it starts setting the outer ext variable, you will end up hitting the issue discussed at this Q&A.
Move your assignment to the top of the loop (not the top of the function), and use let instead of var so that it is block-scoped and won't run into the previously mentioned issue, as in the fuller sketch after this snippet:
for (var i = 0; i < file_input.files.length; i++) {
    var file = file_input.files[i];
    let ext = file.name.split('.').pop();
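Put together, a sketch of the adjusted loop (trimmed to the parts that touch ext; the surrounding file_upload function stays as in the question) might look like:
for (let i = 0; i < file_input.files.length; i++) {
    let file = file_input.files[i];
    let ext = file.name.split('.').pop(); // block-scoped: each iteration keeps its own ext

    let xhr = new XMLHttpRequest();
    xhr.open("POST", url, true);
    xhr.onreadystatechange = function() {
        if (xhr.readyState == 4 && xhr.status == 200) {
            // ext holds the extension captured for this iteration
            document.getElementById("single_file_" + ext).style.display = 'block';
        }
    };

    let reader = new FileReader();
    reader.onloadend = function() {
        let formData = new FormData();
        formData.append('bytes', reader.result);
        formData.append('type', ext); // same block-scoped ext
        xhr.send(formData);
    };
    reader.readAsDataURL(file);
}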

XMLHttpRequest: Access object created in oReq.onload

I am using an XMLHttpRequest and sheetjs (https://github.com/SheetJS/js-xlsx) to create a json object from an excel spreadsheet. All is working well except I am struggling to access the created object outside of oReq.onload. After creating the json object I would like to use it as the input object to more code below the XMLHttpRequest.
I have tried to return json at the end of oReq.onload and also tried to include my additional code inside function (e) {}, but neither option has worked. Any suggestions would be appreciated.
/* set up XMLHttpRequest */
var url = "graph.xlsx";
var oReq = new XMLHttpRequest();
oReq.open("GET", url, true);
oReq.responseType = "arraybuffer";
oReq.onload = function(e) {
    var arraybuffer = oReq.response;
    /* convert data to binary string */
    var data = new Uint8Array(arraybuffer);
    var arr = new Array();
    for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
    var bstr = arr.join("");
    /* Call XLSX */
    var workbook = XLSX.read(bstr, {type: "binary"});
    /* Create the json object */
    var json = to_json(workbook);
    /* Converts excel format to JSON */
    function to_json(workbook) {
        var result = {};
        workbook.SheetNames.forEach(function(sheetName) {
            var roa = XLSX.utils.sheet_to_row_object_array(workbook.Sheets[sheetName]);
            if (roa.length > 0) {
                result[sheetName] = roa;
            }
        });
        return result;
    }
};
oReq.send();
/* I WOULD LIKE TO USE THE CREATED JSON OBJECT HERE */
Ended up being an asynchronous problem, as suggested by @epascarello. A very comprehensive explanation can be found in the post: Why is my variable unaltered after I modify it inside of a function? - Asynchronous code reference
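In other words, whatever needs json has to run inside (or be triggered from) oReq.onload; a minimal sketch of that restructuring, where useJson is a hypothetical helper holding the follow-up code:
oReq.onload = function(e) {
    var data = new Uint8Array(oReq.response);
    var arr = [];
    for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
    var workbook = XLSX.read(arr.join(""), { type: "binary" });
    var json = to_json(workbook); // to_json as defined in the question
    useJson(json);                // safe: the response has arrived by now
};
oReq.send();

function useJson(json) {
    // hypothetical helper: the code that used to sit below oReq.send()
    console.log(json);
}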
Using a callback you can get the data outside the function, as explained here: https://stackoverflow.com/a/23667087/7981344
function asyncReq(callback) {
    oReq.onload = function(e) {
        var arraybuffer = oReq.response;
        var data = new Uint8Array(arraybuffer);
        var arr = new Array();
        for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
        var bstr = arr.join("");
        var workbook = XLSX.read(bstr, {
            type: "binary"
        });
        var sheet_name = workbook.SheetNames[0];
        var worksheet = workbook.Sheets[sheet_name];
        var myJson = XLSX.utils.sheet_to_json(worksheet, {
            header: 1
        });
        callback(myJson);
    };
}
oReq.send();
asyncReq(function(result) {
    // do whatever you want now with the output data
    console.log(result);
});
Hope this is not too late, but I came across the same problem.
Just set a timeout and wait for the onload function to finish, like this:
setTimeout(function() {
    console.log(yourvariable); // prints the variable outside the function
}, 1000);

Convert input=file to byte array

I am trying to convert a file that I get through a file input into a byte[].
I tried with a FileReader, but I must be missing something:
var bytes = [];
var reader = new FileReader();
reader.onload = function () {
    bytes = reader.result;
};
reader.readAsArrayBuffer(myFile);
But in the end, my bytes variable doesn't contain a byte array.
I saw this post: Getting byte array through input type = file, but it doesn't end up with a byte[], and readAsBinaryString() is deprecated.
What am I missing?
I faced a similar problem, and it's true that reader.result doesn't end up as a byte[]. So I cast it to a Uint8Array object. That is still not quite a byte[], so I had to build one from it. Here is my solution to this problem; it worked well for me.
var reader = new FileReader();
var fileByteArray = [];
reader.readAsArrayBuffer(myFile);
reader.onloadend = function (evt) {
    if (evt.target.readyState == FileReader.DONE) {
        var arrayBuffer = evt.target.result,
            array = new Uint8Array(arrayBuffer);
        for (var i = 0; i < array.length; i++) {
            fileByteArray.push(array[i]);
        }
    }
};
fileByteArray is what you are looking for. I saw the comments and it seems you did the same, but I still wanted to share the approach. A more compact alternative using Array.from is sketched below.
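For what it's worth, on browsers with ES6 support, Array.from gives the same plain number array without the manual push loop (a sketch, not part of the original answer):
var reader = new FileReader();
reader.onloadend = function (evt) {
    if (evt.target.readyState == FileReader.DONE) {
        // copies every byte of the Uint8Array into a plain JavaScript array
        var fileByteArray = Array.from(new Uint8Array(evt.target.result));
        console.log(fileByteArray.length + " bytes");
    }
};
reader.readAsArrayBuffer(myFile);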
Seems to me you just want to get files into an array? How about these functions - one where you can read it as text, another as a base64 byte string, and if you really want the readAsArrayBuffer array buffer output, I've included that, too:
document.getElementById("myBtn").addEventListener("click", function() {
uploadFile3();
});
var fileByteArray = [];
function uploadFile1(){
var files = myInput.files[0];
var reader = new FileReader();
reader.onload = processFile(files);
reader.readAsText(files);
}
function uploadFile2(){
var files = document.querySelector('input').files[0];
var reader = new FileReader();
reader.onload = processFile(files);
reader.readAsDataURL(files);
}
function uploadFile3(){
var files = myInput.files[0];
var reader = new FileReader();
reader.onload = processFile(files);
reader.readAsArrayBuffer(files);
}
function processFile(theFile){
return function(e) {
var theBytes = e.target.result; //.split('base64,')[1]; // use with uploadFile2
fileByteArray.push(theBytes);
document.getElementById('file').innerText = '';
for (var i=0; i<fileByteArray.length; i++) {
document.getElementById('file').innerText += fileByteArray[i];
}
}
}
<input id="myInput" type="file">
<button id="myBtn">Try it</button>
<span id="file"></span>
This works very well for me in React JS:
const handleUpload = async (e) => {
    let image = e.currentTarget.files[0];
    const buffer = await image.arrayBuffer();
    let byteArray = new Int8Array(buffer);
    console.log(byteArray);
    formik.setFieldValue(name, byteArray);
};
Here is a modified and, in my opinion, easier version of the accepted answer. This function returns a Promise that resolves with the byte[].
function fileToByteArray(file) {
    return new Promise((resolve, reject) => {
        try {
            let reader = new FileReader();
            let fileByteArray = [];
            reader.readAsArrayBuffer(file);
            reader.onloadend = (evt) => {
                if (evt.target.readyState == FileReader.DONE) {
                    let arrayBuffer = evt.target.result,
                        array = new Uint8Array(arrayBuffer);
                    for (const byte of array) {
                        fileByteArray.push(byte);
                    }
                }
                resolve(fileByteArray);
            };
        }
        catch (e) {
            reject(e);
        }
    });
}
This way you can simply call the function from an async function, like this:
async function getByteArray() {
    // Get file from your input element
    let myFile = document.getElementById('myFileInput').files[0];
    // Wait for the file to be converted to a byteArray
    let byteArray = await fileToByteArray(myFile);
    // Do something with the byteArray
    console.log(byteArray);
}

FileReader's onloadend event is never triggered

I'm trying to make a small snippet to preview images before uploading them:
$.fn.previewImg = function($on) {
    var input = this;
    try {
        if (this.is("input[type='file']")) {
            input.change(function() {
                var reader = new FileReader();
                reader.onloadend = function() {
                    for (var i = 0; i < $on.length; i++) {
                        if (/img/i.test($on[i].tagName)) $on[i].src = reader.result;
                        else $on[i].style.bakgroundImage = "url(" + reader.result + ")";
                    }
                };
            });
        } else throw new exception("Trying to preview image from an element that is not a file input!");
    } catch (x) {
        console.log(x);
    }
};
I'm calling it like:
$("#file").previewImg($(".preview_img"));
but the onloadend function is never called.
FIDDLE
Actually, you have to get the selected file and instruct the FileReader to read it.
Below is the corrected code.
$.fn.previewImg = function($on) {
    var input = this;
    try {
        if (this.is("input[type='file']")) {
            input.change(function(evt) {
                var reader = new FileReader();
                console.log("Input changed");
                reader.onloadend = function() {
                    console.log("onloadend triggered");
                    for (var i = 0; i < $on.length; i++) {
                        if (/img/i.test($on[i].tagName)) $on[i].src = reader.result;
                        else $on[i].style.backgroundImage = "url(" + reader.result + ")";
                    }
                };
                // get the selected file
                var files = evt.target.files;
                // instruct the reader to read it
                reader.readAsDataURL(files[0]);
            });
        } else throw new Error("Trying to preview image from an element that is not a file input!");
    } catch (x) {
        console.log(x);
    }
};
$("#file").previewImg($(".preview_img"));

Reading multiple files with Javascript FileReader API one at a time

I'm using the FileReader API to read multiple files.
<html> <body>
<input type="file" id="filesx" name="filesx[]"
       onchange="readmultifiles(this.files)" multiple=""/>
<div id="bag"><ul/></div>
<script>
window.onload = function() {
    if (typeof window.FileReader !== 'function') {
        alert("The file API isn't supported on this browser yet.");
    }
}

function readmultifiles(files) {
    var ul = document.querySelector("#bag>ul");
    while (ul.hasChildNodes()) {
        ul.removeChild(ul.firstChild);
    }

    function setup_reader(file) {
        var name = file.name;
        var reader = new FileReader();
        reader.onload = function(e) {
            var bin = e.target.result; // get file content
            // do sth with text
            var li = document.createElement("li");
            li.innerHTML = name;
            ul.appendChild(li);
        }
        reader.readAsBinaryString(file);
    }

    for (var i = 0; i < files.length; i++) { setup_reader(files[i]); }
}
</script> </body> </html>
The problem is that all files are read at the same time, and when the files have a total size (sum) that is very large, the browser crashes.
I want to read one file after another, so that the memory consumption is reduced.
Is this possible?
I came up with a solution myself which works.
function readmultifiles(files) {
    var reader = new FileReader();

    function readFile(index) {
        if (index >= files.length) return;
        var file = files[index];
        reader.onload = function(e) {
            // get file content
            var bin = e.target.result;
            // do sth with bin
            readFile(index + 1);
        }
        reader.readAsBinaryString(file);
    }

    readFile(0);
}
I'm updating this question for the benefit of new users who are looking for a solution to upload multiple files via the FileReader API, especially using ES.
Rather than manually iterating over each file, it's much simpler and cleaner to use Object.keys(files):
<input type="file" onChange="readmultifiles" multiple/>
<script>
function readmultifiles(e) {
const files = e.currentTarget.files;
Object.keys(files).forEach(i => {
const file = files[i];
const reader = new FileReader();
reader.onload = (e) => {
//server call for uploading or reading the files one-by-one
//by using 'reader.result' or 'file'
}
reader.readAsBinaryString(file);
})
};
</script>
This should read the files one by one:
function readmultifiles(files) {
    var ul = document.querySelector("#bag>ul");
    while (ul.hasChildNodes()) {
        ul.removeChild(ul.firstChild);
    }
    // Read first file
    setup_reader(files, 0);
}

// Don't define functions in functions in functions, when possible.
function setup_reader(files, i) {
    var file = files[i];
    var name = file.name;
    var reader = new FileReader();
    reader.onload = function(e) {
        readerLoaded(e, files, i, name);
    };
    reader.readAsBinaryString(file);
    // After reading, read the next file.
}

function readerLoaded(e, files, i, name) {
    // get file content
    var bin = e.target.result;
    // do sth with text
    var ul = document.querySelector("#bag>ul"); // look the list up again, since it is not in scope here
    var li = document.createElement("li");
    li.innerHTML = name;
    ul.appendChild(li);
    // If there's a file left to load
    if (i < files.length - 1) {
        // Load the next file
        setup_reader(files, i + 1);
    }
}
Define the input using the multiple property:
<input onchange='upload(event)' type='file' multiple/>
Define the upload function:
const upload = async (event) => {
    // Convert the FileList into an array and iterate
    let files = Array.from(event.target.files).map(file => {
        // Define a new file reader
        let reader = new FileReader();
        // Create a new promise
        return new Promise(resolve => {
            // Resolve the promise after reading file
            reader.onload = () => resolve(reader.result);
            // Read the file as a text
            reader.readAsText(file);
        });
    });
    // At this point you'll have an array of results
    let res = await Promise.all(files);
};
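A possible wiring and consumption sketch (the addEventListener hookup and the logging are illustrative assumptions, not part of the original answer):
// Assumes an <input type="file" multiple> somewhere on the page
document.querySelector('input[type="file"]').addEventListener('change', async (event) => {
    const res = await Promise.all(
        Array.from(event.target.files).map(file =>
            new Promise(resolve => {
                const reader = new FileReader();
                reader.onload = () => resolve(reader.result);
                reader.readAsText(file);
            })
        )
    );
    // res[i] holds the text of the i-th selected file, in selection order
    res.forEach((text, i) => console.log("file " + i + ": " + text.length + " characters"));
});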
My complete solution is here:
<html> <body>
<input type="file" id="filesx" name="filesx[]"
       onchange="readmultifiles(this.files)" multiple=""/>
<div id="bag"></div>
<script>
window.onload = function() {
    if (typeof window.FileReader !== 'function') {
        alert("The file API isn't supported on this browser yet.");
    }
}

function readmultifiles(files) {
    var reader = new FileReader();

    function readFile(index) {
        if (index >= files.length) return;
        var file = files[index];
        reader.onload = function(e) {
            // get file content
            var bin = e.target.result;
            // do sth with bin
            readFile(index + 1);
        }
        reader.readAsBinaryString(file);
    }

    readFile(0);

    function setup_reader(file) {
        var name = file.name;
        var reader = new FileReader();
        var ul = document.createElement("ul");
        document.getElementById('bag').appendChild(ul);
        reader.onload = function(e) {
            var bin = e.target.result; // get file content
            // do sth with text
            var li = document.createElement("li");
            li.innerHTML = name;
            ul.appendChild(li);
        }
        reader.readAsBinaryString(file);
    }

    for (var i = 0; i < files.length; i++) { setup_reader(files[i]); }
}
</script> </body> </html>
I implemented another solution using modern JS (Map, Iterator). I adapted the code from my Angular application (originally written with some TS features).
Like Steve KACOU mentioned, we create a different FileReader instance for each file.
<input type="file" id="filesx" name="filesx[]"
onchange="processFileChange(this)" multiple=""/>
function processFileChange(event) {
if (event.target.files && event.target.files.length) {
const fileMap = new Map();
for (let i = 0; i < event.target.files.length; i++) {
const file = event.target.files[i];
const fileReader = new FileReader();
fileMap.set(fileReader, file);
}
const mapEntries = fileMap.entries();
readFile(mapEntries);
}
}
function readFile(mapEntries) {
const nextValue = mapEntries.next();
if (nextValue.done === true) {
return;
}
const [fileReader, file] = nextValue.value;
fileReader.readAsDataURL(file);
fileReader.onload = () => {
// Do black magic for each file here (using fileReader.result)
// Read the next file
readFile(mapEntries);
};
}
Basically this takes advantage of passing objects by reference to perpetuate the map with every iteration. This makes the code quite easy to read in my opinion.
Taking the best parts of these answers.
<input type="file" onchange="readmultifiles(this.files)" multiple />
<script>
function readmultifiles(files) {
for (file of files) {
const reader = new FileReader();
reader.readAsBinaryString(file);
reader.fileName = file.name;
reader.onload = (event) => {
const fileName = event.target.fileName;
const content = event.currentTarget.result;
console.log({ fileName, content });
};
}
}
</script>
You must instantiate a FileReader for each file to read.
function readFiles(event) {
    // Get the files
    var files = event.target.files || [];
    if (files.length) {
        for (let index = 0; index < files.length; index++) {
            // instantiate a FileReader for the current file to read
            const reader = new FileReader();
            reader.onload = function() {
                var result = reader.result;
                console.log(result); // File data
            };
            reader.readAsDataURL(files[index]);
        }
    }
}
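A possible wiring for the snippet above, assuming the change event is passed through as the event.target.files access expects:
<input type="file" multiple onchange="readFiles(event)" />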
Try this
const setFileMultiple = (e) => {
    e.preventDefault();
    // Get the files
    let file = [...e.target.files] || [];
    file.forEach((item, index) => {
        let reader = new FileReader();
        reader.onloadend = () => {
            console.log("result", reader.result);
        };
        reader.readAsDataURL(file[index]);
    });
};
