Migrate FileReader ReadAsBinaryString() to ReadAsArrayBuffer() or ReadAsText() - javascript

I've noticed that newer versions of Mozilla Firefox throw an "allocation size overflow" error from FileReader.readAsBinaryString() when the file is bigger than roughly 200MB.
Here's some of my test code for the client browser:
function upload(fileInputId, fileIndex)
{
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsBinaryString(file);
    reader.onloadend = function(evt)
    {
        var xhr = new XMLHttpRequest();
        xhr.open("POST", "upload.php", true);
        // converts the binary string back into raw bytes before sending
        XMLHttpRequest.prototype.mySendAsBinary = function(text){
            var data = new ArrayBuffer(text.length);
            var ui8a = new Uint8Array(data, 0);
            for (var i = 0; i < text.length; i++){
                ui8a[i] = (text.charCodeAt(i) & 0xff);
            }
            if(typeof window.Blob == "function")
            {
                blob = new Blob([data]);
            }else{
                // legacy fallback for browsers that only ship BlobBuilder
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(data);
                blob = bb.getBlob();
            }
            this.send(blob);
        }
        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position/total)*100);
        });
        xhr.onreadystatechange = function()
        {
            if(xhr.readyState == 4)
            {
                if(xhr.status == 200)
                {
                    console.log("Done");
                }else{
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
So I tried changing it to FileReader.readAsArrayBuffer(); the error no longer shows up, but the data that arrives is not the same (since it's not read as a binary string).
Does anyone have a solution to this problem? Is there any way to upload bigger files from JS to a web server in raw/string form, other than through a FileReader implementation?
I read in the Mozilla JS documentation that:
This feature is non-standard and is not on a standards track. Do not
use it on production sites facing the Web: it will not work for every
user. There may also be large incompatibilities between
implementations and the behavior may change in the future. - Mozilla
If not readAsBinaryString, then how should I implement readAsArrayBuffer or readAsText?

To send Files to a web-server, you simply don't need JS. HTML alone is well able to do this with the <form> element.
Now if you want to go through JS, e.g. to catch the different ProgressEvents, then you can send your File directly; there is no need to read it whatsoever on your side.
To do this, you've got two (or three) solutions.
If your server is able to handle PUT requests, you can simply xhr.send(file);.
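For the PUT case, here is a minimal sketch; "/upload" is a hypothetical endpoint that accepts a raw request body:
// Send the File object directly; the browser streams it, no FileReader needed.
var fileInput = document.querySelector('input[type="file"]');
var xhr = new XMLHttpRequest();
xhr.open('PUT', '/upload'); // hypothetical endpoint
xhr.upload.onprogress = function (e) {
    if (e.lengthComputable) {
        console.log(Math.round((e.loaded / e.total) * 100) + '%');
    }
};
xhr.onload = function () {
    console.log(xhr.status === 200 ? 'Done' : 'Fail');
};
xhr.send(fileInput.files[0]);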
Otherwise, you'd have to go through a FormData.
// if you really want to go the XHR way
document.forms[0].onsubmit = function handleSubmit(evt) {
    if(!window.FormData) { // old browsers: let the <form> submit natively
        return;
    }
    // now we handle the submit through js
    evt.preventDefault();
    var fD = new FormData(this);
    var xhr = new XMLHttpRequest();
    xhr.onprogress = function handleProgress(evt){};
    xhr.onload = function handleLoad(evt){};
    xhr.onerror = function handleError(evt){};
    xhr.open(this.method, this.action);
    // xhr.send(fD); // won't work in StackSnippet
    log(fD, this.method, this.action); // so we just log its content
};

function log(formData, method, action) {
    console.log('would have sent');
    for(let [key, val] of formData.entries())
        console.log(key, val);
    console.log('through', method);
    console.log('to', action);
}
<!-- this in itself is enough -->
<form method="POST" action="your_server.page">
    <input type="file" name="file_upload">
    <input type="submit">
</form>
Now, you sent a comment saying that you can't upload Files bigger than 1GB to your server.
This limitation is only due to your server's configuration, so the best option if you want to accept such big files is to configure it correctly.
But if you really want to send your File in chunks, even then don't leave the Blob interface.
Indeed, Blobs have a slice() method, so use it.
document.forms[0].onsubmit = function handleSubmit(evt) {
    evt.preventDefault();
    var file = this.elements[0].files[0];
    var processed = 0;
    if(file) {
        // var MAX_CHUNK_SIZE = Math.min(file.size, server_max_size);
        // for demo we just split in 10 chunks
        var MAX_CHUNK_SIZE = file.size > 10 ? (file.size / 10) | 0 : 1;
        loadChunk(0);
    }
    function loadChunk(start) {
        var fD = new FormData();
        var sliced = file.slice(start, start + MAX_CHUNK_SIZE);
        processed += sliced.size; // only for demo
        fD.append('file_upload', sliced, file.name);
        fD.append('starting_index', start);
        if(start + MAX_CHUNK_SIZE >= file.size) {
            fD.append('last_chunk', true);
        }
        var xhr = new XMLHttpRequest();
        xhr.open('POST', 'your_server.page');
        xhr.onload = function onchunkposted(evt) {
            if(start + MAX_CHUNK_SIZE >= file.size) {
                console.log('All done. Original file size: %s, total of chunks sizes %s', file.size, processed);
                return;
            }
            loadChunk(start + MAX_CHUNK_SIZE);
        };
        // xhr.send(fD);
        log(fD);
        setTimeout(xhr.onload, 200); // fake XHR onload for the demo
    }
};

function log(formData) {
    console.log('would have sent');
    for(let [key, val] of formData.entries())
        console.log(key, val);
}
<form method="POST" action="your_server.page">
    <input type="file" name="file_upload">
    <input type="submit">
</form>
But you absolutely don't need to go through a FileReader for this operation.
Actually, the only case where it could make sense to use a FileReader here would be for some Android browsers that don't support passing a Blob into a FormData, even though they don't give a single clue about it.
So in this case, you'd have to set up your server to let you know the request was empty, and only then read the File as a data URL that you would send in place of the original File.
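A hedged sketch of that fallback, reusing the "your_server.page" placeholder from the snippets above:
// Fallback for browsers that silently drop Blobs from FormData:
// send the file as a data URL string instead.
function sendAsDataURL(file) {
    var reader = new FileReader();
    reader.onload = function () {
        var fD = new FormData();
        fD.append('file_as_dataurl', reader.result); // a "data:...;base64,..." string
        fD.append('file_name', file.name);
        var xhr = new XMLHttpRequest();
        xhr.open('POST', 'your_server.page'); // placeholder endpoint
        xhr.send(fD);
    };
    reader.readAsDataURL(file);
}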

After a long week of research and sleepless nights: you can't upload binary strings without breaking them. In my tests, base64 also didn't work for all files, only for images; the journey from the client side to the server corrupts the bytes being sent.

Kaiido's statement is correct:
To send Files to a web-server, you simply don't need js
But that doesn't answer my question. Using a simple XMLHttpRequest() can upload the file and track its progress as well. But still, it's not it. A direct upload, whether from the <form> or through XMLHttpRequest(), requires you to increase the upload limits in your PHP settings. That method is not convenient for me. What if a client uploads a 4GB file? Then I have to raise the limit to 4GB. And when the next client uploads a 6GB file, I have to raise it to 6GB.
Using the slice() method makes sense for bigger files, since we can send them to the server part by part. But I am not using it this time.
Here is my test that worked the way I want. I hope some expert can correct me if I am wrong.
My Upload.js
function upload(fileInputId, fileIndex)
{
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsArrayBuffer(file);
    reader.onloadend = function(evt)
    {
        var xhr = new XMLHttpRequest();
        // base64_encode() is assumed to be defined elsewhere (e.g. a btoa() wrapper)
        xhr.open("POST", "upload.php?name=" + base64_encode(file.name), true);
        XMLHttpRequest.prototype.mySendAsBinary = function(text){
            // "text" is actually the ArrayBuffer from readAsArrayBuffer()
            var ui8a = new Uint8Array(new Int8Array(text));
            if(typeof window.Blob == "function")
            {
                blob = new Blob([ui8a]);
            }else{
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(ui8a);
                blob = bb.getBlob();
            }
            this.send(blob);
        }
        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position/total)*100);
            console.log(percentage);
        });
        xhr.onreadystatechange = function()
        {
            if(xhr.readyState == 4)
            {
                if(xhr.status == 200)
                {
                    console.log("Done");
                }else{
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
Below is how the PHP server listens for the ArrayBuffer from JS:
if(isset($_GET["name"])){
    // NOTE: $name should be sanitized before use; see the security notice below
    $name = base64_decode($_GET["name"]);
    $loc = $name;
    $inputHandler = fopen('php://input', "r");
    $fileHandler = fopen($loc, "w+");
    while(true) {
        //$buffer = fgets($inputHandler, 1024);
        $buffer = fread($inputHandler, 1000000);
        if (strlen($buffer) == 0) {
            fclose($inputHandler);
            fclose($fileHandler);
            return true;
        }
        //$b = base64_encode($buffer);
        fwrite($fileHandler, $buffer);
    }
}
The above method works well. The FileReader reads the file as an ArrayBuffer, then uploads it to the server. For me, migrating from readAsBinaryString() to readAsArrayBuffer() is important, and readAsArrayBuffer() performs better than readAsBinaryString().
Here are some reasons why some developers rely on the FileReader API:
Streaming. Using this method, the file is streamed, so we can avoid raising the PHP limits again and again.
Easy encryption. As the file is sent as an ArrayBuffer, it is easy for the developer to encrypt the file while the upload is in progress (see the sketch after this list).
This method also supports uploading any type of file. In my tests, the readAsArrayBuffer() method is faster than readAsBinaryString() and a direct form upload. You may try it.
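To illustrate the "easy encryption" point above, here is a hedged sketch that applies a trivial XOR transform to the bytes before they are wrapped in a Blob. This is illustration only, not real cryptography; a production implementation would use the Web Crypto API instead.
// Illustration only: XOR each byte with a demo key before upload.
// A real application should use window.crypto.subtle instead.
function transformBuffer(arrayBuffer, key) {
    var bytes = new Uint8Array(arrayBuffer); // view over the ArrayBuffer
    var out = new Uint8Array(bytes.length);
    for (var i = 0; i < bytes.length; i++) {
        out[i] = bytes[i] ^ key; // toy transform, not encryption
    }
    return new Blob([out]);
}
// Usage inside the onloadend handler above:
// this.send(transformBuffer(evt.target.result, 0x5a));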
Security Notice
The above code is test code only. To use it in production, you have to send the data, whether in GET or POST, over HTTPS. You should also sanitize the client-supplied file name before writing to it on the server.

Related

HTML Rename download link

I have an mp3 link like this:
http://example.com/932937293723.mp3
but I want to rename it when the user downloads the file, so it looks like this:
http://example.com/Artist - Title.mp3
My code:
<a href="http://example.com/932937293723.mp3" download="Artist - Title.mp3">DOWNLOAD</a>
The mp3 file is stored on a remote server, and I'm not the owner of that server.
The HTML download attribute doesn't seem like a good solution because it's not cross-browser. Any cross-browser solution to solve this? Javascript maybe :D
If you insist on working from the front end, try working with the following code. The getBlob method is deprecated, so you will need to update that part yourself. Let me know.
function getBinary(file){
    var xhr = new XMLHttpRequest();
    xhr.open("GET", file, false); // synchronous request, for demo only
    xhr.overrideMimeType("text/plain; charset=x-user-defined");
    xhr.send(null);
    return xhr.responseText;
}

function sendBinary(data, url){
    var xhr = new XMLHttpRequest();
    xhr.open("POST", url, true);
    if (typeof XMLHttpRequest.prototype.sendAsBinary == "function") { // Firefox 3 & 4
        var tmp = '';
        for (var i = 0; i < data.length; i++) tmp += String.fromCharCode(data.charCodeAt(i) & 0xff);
        data = tmp;
    }
    else { // Chrome 9
        // http://javascript0.org/wiki/Portable_sendAsBinary
        XMLHttpRequest.prototype.sendAsBinary = function(text){
            var data = new ArrayBuffer(text.length);
            var ui8a = new Uint8Array(data, 0);
            for (var i = 0; i < text.length; i++) ui8a[i] = (text.charCodeAt(i) & 0xff);
            var bb = new BlobBuilder(); // doesn't exist in Firefox 4
            bb.append(data);
            var blob = bb.getBlob();
            this.send(blob);
        }
    }
    xhr.sendAsBinary(data);
}

var data = getBinary("My music.mp3");
sendBinary(data, 'http://www.tonycuffe.com/mp3/tailtoddle_lo.mp3');
var data = getBinary("My music.mp3");
sendBinary(data,'http://www.tonycuffe.com/mp3/tailtoddle_lo.mp3');
In your back-end code, you can fetch the file to your server, store it in a variable, rename it there, set the corresponding headers, and return it. This could happen as an Ajax call initiated by the JavaScript click.
Post further details about your backend and I can help you more.
You can use something like the following (ASP.NET):
In the ASPX page:
Download
In the code-behind:
Response.ContentType = "audio/mpeg3";
Response.AddHeader("content-disposition", "attachment;filename=New_file_name.mp3");
Server.Transfer(decoded_URL_of_MP3_file);
Look here for other MIME types
Update #1 - Using JavaScript alone, you can try something like this, though I've not tested it in different browsers:
function Download(url, fancyFileName)
{
    var file = document.createElement('a');
    file.href = url;
    file.target = '_blank';
    file.download = fancyFileName;
    // synthesize a click on the anchor
    var event = document.createEvent('Event');
    event.initEvent('click', true, true);
    file.dispatchEvent(event);
    window.URL.revokeObjectURL(file.href);
}
Download('http://server.com/file.mp3','Artist_file.mp3');
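If the remote server allows cross-origin requests, another hedged option is to fetch the file as a Blob and point the anchor at an object URL, so the download attribute applies to a same-origin blob: URL:
// Sketch: fetch the mp3 as a Blob, then download it under a new name.
// Only works if the remote server sends permissive CORS headers.
function downloadRenamed(url, fancyFileName) {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', url, true);
    xhr.responseType = 'blob';
    xhr.onload = function () {
        var a = document.createElement('a');
        a.href = URL.createObjectURL(xhr.response); // same-origin blob: URL
        a.download = fancyFileName; // rename applies reliably to blob: URLs
        document.body.appendChild(a);
        a.click();
        document.body.removeChild(a);
        URL.revokeObjectURL(a.href);
    };
    xhr.send();
}
downloadRenamed('http://example.com/932937293723.mp3', 'Artist - Title.mp3');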

Uploading large files via XHR fails with Chrome, works with Firefox

I am uploading video files to my server. The files are at least 20MB, some over 100MB.
For improved user experience, I upload via JavaScript and XMLHttpRequest, this way I can display upload speed and remaining time.
And to avoid any trouble on the server (such as requests timing out and taking too long to process), I submit the file in little packages to the server and have a PHP script re-assemble the file.
My script works great, with one weird catch - until just now I thought it was because of my ISP.
Using Google Chrome I can upload files up to 20MB with no problems. But anything larger produces errors: for example, my 100MB file will not send anything to the server - the second package never arrives. With my 50MB file it happens after around 47%, with the 7th package. And another file doesn't even send the first package.
I restarted my computer, and it keeps happening at the same position/package number for each file - though the position has nothing in common with the other failed files.
It doesn't matter if you try to start after one of the failed packages; say I start at #8 after #7 failed - it will continue to fail. If I ignore the errors (rather than trying again), it will just send the rest of the file in empty chunks.
I had already tried from a different internet connection, though I had to use Firefox there. And it worked fine. So I installed Firefox on my machine, and BAM, it works like a charm, correctly sending the 100MB file.
What could be going wrong on Chrome?
$(document).on('click','#video_upload',function(evt){
    uploadProcess('vod_video_file');
});

function toBlob(text)
{
    var data = new ArrayBuffer(text.length);
    var ui8a = new Uint8Array(data, 0);
    for (var i = 0; i < text.length; i++) ui8a[i] = (text.charCodeAt(i) & 0xff);
    var blob;
    if(typeof window.Blob == "function")
    {
        blob = new Blob([data]);
    }else{
        var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
        bb.append(data);
        blob = bb.getBlob();
    }
    return blob;
}

function splitFile(dataArray, size) {
    var blobs = [];
    for (var i = 0; i < dataArray.size; i += size)
    {
        blobs.push(dataArray.slice(i, i + size));
    }
    return blobs;
}

function uploadProcess(fileInputId)
{
    var file = document.getElementById(fileInputId).files[0];
    var reader = new FileReader();
    reader.readAsBinaryString(file);
    reader.onloadend = function(evt)
    {
        fileUpload(evt.target.result);
    }
}

function fileUpload(inputDataArray)
{
    var since;
    var intervalid;
    var totalBytes = inputDataArray.length;
    var packets = [];
    var packetNum = 0;
    var packetCount = 0;
    var packetSize = 0;

    function startUpload()
    {
        intervalid = setInterval(function(){ updateUploadStats(); }, 1000);
        calculatePacketSize();
        createPackets();
        submitPacket();
    }

    function calculatePacketSize()
    {
        var ideal_size = 3*1024*1024;
        var packet_count = Math.ceil(totalBytes/ideal_size);
        packetSize = Math.ceil(totalBytes/packet_count);
    }

    function createPackets()
    {
        packets = splitFile(toBlob(inputDataArray), packetSize);
        packetCount = packets.length;
    }

    function updateUploadStats(e)
    {
        // displaying upload progress in GUI
    }

    function submitPacket()
    {
        var xhr = new XMLHttpRequest();
        // token is assumed to be defined elsewhere on the page
        xhr.open("POST", 'index.php?controller=AdminVodVideo&action=VideoUpload&ajax=1&r='+packetNum+'&token='+token, true);
        xhr.setRequestHeader("Content-type","application/octet-stream");
        XMLHttpRequest.prototype.mySendAsBinary = function(text){
            this.send(text);
        }
        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            updateUploadStats(e);
        });
        xhr.onreadystatechange = function()
        {
            if(xhr.readyState == 4)
            {
                if(xhr.status == 200)
                {
                    // server will return the string 'upload failed' if the file to be received was empty.
                    if( xhr.responseText == 'upload failed')
                    {
                        console.log('FAILED , trying again in 3 s');
                        setTimeout(submitPacket, 3000);
                    }
                    else
                    {
                        updateUploadStats();
                        packetNum++;
                        if(packetNum == packetCount)
                        {
                            processOnServer();
                        }
                        else
                        {
                            submitPacket();
                        }
                    }
                }else{
                    // process error
                    console.log('we got a 500 error');
                }
            }
        };
        since = Date.now();
        xhr.mySendAsBinary( packets[packetNum] );
    }

    function processOnServer()
    {
        // telling the server to piece the file back together.
    }

    startUpload();
}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>

How to Asynchronously write an ArrayBuffer directly to file using nsIArrayBufferInputStream in Firefox extension

To make a long story short:
How to Asynchronously write an ArrayBuffer directly to file using nsIArrayBufferInputStream in Firefox extension ?
It seems that MDN does not have any documentation on nsIArrayBufferInputStream.
I know I can use nsIStringInputStream and convert the ArrayBuffer to a string, but this poses a big performance hit.
Also, converting an ArrayBuffer to a string using this code:
String.fromCharCode.apply(null, new Uint16Array(buf));
does not work if the buffer is 500 KB or bigger, so we must loop over it one character at a time:
var buf16 = new Uint16Array(buf);
var s = '';
for (let i = 0; i < buf16.length; i++){
    s += String.fromCharCode(buf16[i]);
}
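A middle ground, assuming buf is the ArrayBuffer, is to apply String.fromCharCode in fixed-size chunks, staying under the engine's argument-count limit without paying the one-character-at-a-time cost:
// Convert an ArrayBuffer to a string in 32K-element chunks to avoid the
// argument-length limit that breaks a single .apply() call on large buffers.
function bufferToString(buf) {
    var u16 = new Uint16Array(buf);
    var CHUNK = 0x8000; // 32768 elements per apply() call
    var parts = [];
    for (var i = 0; i < u16.length; i += CHUNK) {
        parts.push(String.fromCharCode.apply(null, u16.subarray(i, i + CHUNK)));
    }
    return parts.join('');
}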
Or, I can use nsIBinaryOutputStream.writeByteArray but it cannot be used with NetUtil.asyncCopy (or can it?)
//this works ok, but is synchronous :-(
function writeBinFile(aFile, data){
    Components.utils.import("resource://gre/modules/FileUtils.jsm");
    let nsFile = Components.Constructor("@mozilla.org/file/local;1", Ci.nsILocalFile, "initWithPath");
    if(typeof aFile === 'string') aFile = nsFile(aFile);
    var stream = FileUtils.openSafeFileOutputStream(aFile, FileUtils.MODE_WRONLY | FileUtils.MODE_CREATE);
    var binaryStream = Cc["@mozilla.org/binaryoutputstream;1"].createInstance(Ci.nsIBinaryOutputStream);
    binaryStream.setOutputStream(stream);
    binaryStream.writeByteArray(data, data.length);
    FileUtils.closeSafeFileOutputStream(stream);
}
And the long story is...
I have been trying to use nsIArrayBufferInputStream
http://dxr.mozilla.org/mozilla-central/source/netwerk/base/public/nsIArrayBufferInputStream.idl
but with no success. The code I tried:
function fileWrite(file, data, callback) {
    Cu.import("resource://gre/modules/FileUtils.jsm");
    Cu.import("resource://gre/modules/NetUtil.jsm");
    let nsFile = Components.Constructor("@mozilla.org/file/local;1", Ci.nsILocalFile, "initWithPath");
    if (typeof file == 'string') file = new nsFile(file);
    let ostream = FileUtils.openSafeFileOutputStream(file);
    let istream = Cc["@mozilla.org/io/arraybuffer-input-stream;1"].createInstance(Ci.nsIArrayBufferInputStream);
    istream.setData(data, 0, data.length);
    let bstream = Cc["@mozilla.org/binaryinputstream;1"].createInstance(Ci.nsIBinaryInputStream);
    bstream.setInputStream(istream);
    //NetUtil.asyncCopy(istream, ostream,
    NetUtil.asyncCopy(bstream, ostream,
        function(status) {
            if (callback) callback(Components.isSuccessCode(status));
        }
    );
}
The ArrayBuffer data param is the response from XMLHttpRequest:
function getBinFile(url, dir) {
    let nsFile = Components.Constructor("@mozilla.org/file/local;1", Ci.nsILocalFile, "initWithPath");
    let oReq = new XMLHttpRequest();
    oReq.open("GET", url, true);
    oReq.responseType = "arraybuffer";
    oReq.onload = function(oEvent) {
        var arrayBuffer = oReq.response;
        if (arrayBuffer) {
            //let byteArray = new Uint8Array(arrayBuffer);
            let byteArray = arrayBuffer;
            dir = /\\$/.test(dir) ? dir : dir + '\\';
            let file = nsFile(dir + decodeURIComponent(url.split('/').pop()));
            fileWrite(file, byteArray);
        }
    };
    oReq.send(null);
}
Calling it like this:
getBinFile( 'http://....', 'c:\\demo\\');
A file is created, but with no contents!
I'm answering myself in case anyone stumbles upon this question...
With help from Josh Matthews (of Mozilla) I found the answer: use byteLength instead of length. An ArrayBuffer has no length property, so setData was effectively being passed undefined as the count.
istream.setData(data, 0, data.byteLength);

Why is my file data being lost in transfer?

Ok, so I found this method for uploading large files from a different question:
function sliceit(file)
{
    var fr = new FileReader();
    var chunkSize = 1000000;
    var chunks = Math.ceil(file.size / chunkSize);
    var chunk = 0;
    var isstart = true;
    function loadNext() {
        var start, end;
        start = chunk * chunkSize;
        if (start > file.size)
            start = end + 1;
        end = start + (chunkSize - 1) >= file.size ? file.size : start + (chunkSize - 1);
        fr.onloadend = function(e)
        {
            // this busy-wait does nothing useful: inside onloadend, fr.result is already set
            while(!fr.result || fr.result == "" || fr.result == undefined)
            {
            }
            var fdata = fr.result;
            var xmlhttp, form, data;
            data = {};
            data.filename = document.getElementById("filename0").value;
            data.username = document.getElementById("username").value;
            data.password = document.getElementById("password").value;
            data.public = document.getElementById("public").value;
            if(isstart)
            {
                form = new FormData();
                form.append("filename", data.filename);
                form.append("username", data.username);
                form.append("password", data.password);
                form.append("public", data.public);
                form.append("filebytes", fdata);
                xmlhttp = jQuery.ajaxSettings.xhr();
                xmlhttp.open("POST", "../MakeEmptyFile.php", false); // synchronous!
                isstart = false;
            }
            else
            {
                form = new FormData();
                form.append("filename", data.filename);
                form.append("username", data.username);
                form.append("password", data.password);
                form.append("filebytes", fdata);
                xmlhttp = jQuery.ajaxSettings.xhr();
                xmlhttp.open("POST", "../AddTo.php", false); // synchronous!
            }
            xmlhttp.send(form);
            console.log(xmlhttp.responseText);
            if (++chunk <= chunks)
            {
                loadNext();
            }
            else
            {
                stdlog("Completed."); // stdlog is assumed to be defined elsewhere
            }
        };
        fr.readAsText(file.slice(start, end));
    }
    loadNext();
}
For some reason, every time I run the XMLHttpRequest, it shows that the file data was never actually sent. Is this because it was too big, or something? Thanks!
It sounds like you are missing some basic concepts related to the File API. My answer here feeds off of the comments from your question.
First off, the proper way to upload a file in chunks is to use the slice method on the Blob or File object. The slice method will always return another Blob. This Blob represents the portion of the Blob or File you have carved out. You can then append that Blob to your FormData object. Your FormData object will be sent in a multipart-encoded POST request. The Blob will be represented inside of one of the multipart boundaries in the request as a form field, just like any other parameters you have appended to your FormData object.
Server-side, you would read this Blob just as you would read any other file when parsing a multipart-encoded request. This Blob will need to be temporarily stored server-side, along with any other Blobs that make up the complete file. Once you have received all parts, you will need to assemble them in the correct order server-side.
Your current method involves slicing the file into blobs, reading the entire contents of the blob, and then attaching the contents to your FormData object. This seems redundant and inefficient to me.
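In code, the approach described above could look like this hedged sketch (it reuses the placeholder endpoint names from the question):
// Slice the File directly and post each chunk in order; no FileReader involved.
function uploadInChunks(file, chunkSize) {
    var offset = 0;
    (function postNext() {
        var chunk = file.slice(offset, offset + chunkSize); // slice() returns a Blob
        var form = new FormData();
        form.append('filebytes', chunk, file.name);
        form.append('offset', offset); // so the server can reassemble in order
        var xhr = new XMLHttpRequest();
        xhr.open('POST', '../AddTo.php'); // placeholder endpoint from the question
        xhr.onload = function () {
            offset += chunkSize;
            if (offset < file.size) postNext(); // send the next chunk
        };
        xhr.send(form);
    })();
}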

Is it possible to save a File object in LocalStorage and then reload a File via FileReader when a user comes back to a page?

For example, say the user loads some very large images or media files into your web app. When they return, you want your app to show what they've previously loaded, but you can't keep the actual file data in LocalStorage because the data is too large.
This is NOT possible with localStorage. Data stored in localStorage needs to be one of the primitive types that can be serialized. This does not include the File object.
For example, this will not work as you'd expect:
var el = document.createElement('input');
el.type = 'file';
el.onchange = function(e) {
    localStorage.file = JSON.stringify(this.files[0]); // serializes to "{}" - the file data is lost
    // LATER ON...
    var reader = new FileReader();
    reader.onload = function(e) {
        var result = this.result; // never reaches here.
    };
    reader.readAsText(JSON.parse(localStorage.file));
};
document.body.appendChild(el);
The solution is to use a more powerful storage option like writing the file contents to the HTML5 Filesystem or stashing it in IndexedDB.
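For instance, IndexedDB can store Blob and File objects directly in modern browsers. A minimal hedged sketch, where the database and store names are made up:
// Persist a File in IndexedDB and read it back later.
// "fileStore" and "files" are hypothetical names.
var fileInput = document.querySelector('input[type="file"]');
var request = indexedDB.open('fileStore', 1);
request.onupgradeneeded = function () {
    request.result.createObjectStore('files');
};
request.onsuccess = function () {
    var db = request.result;
    var tx = db.transaction('files', 'readwrite');
    tx.objectStore('files').put(fileInput.files[0], 'lastFile'); // store the File itself
    tx.oncomplete = function () {
        db.transaction('files').objectStore('files').get('lastFile').onsuccess = function (evt) {
            console.log('restored file:', evt.target.result.name);
        };
    };
};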
Technically you can, if you just need to save small files in localStorage.
Just base64 that ish, and since it's a string... it's localStorage-friendly.
I think localStorage has a ~5MB limit. base64 strings are a pretty low file size, so this is a feasible way to store small images. If you use this lazy man's way, the downside is you'll have to mind the 5MB limit. I think it could def be a solution, depending on your needs.
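A hedged sketch of that approach, using readAsDataURL and a try/catch for the quota:
// Store a small file in localStorage as a data URL; catch quota errors.
function stashSmallFile(file, key) {
    var reader = new FileReader();
    reader.onload = function () {
        try {
            localStorage.setItem(key, reader.result); // a "data:...;base64,..." string
        } catch (e) {
            console.log('Storage failed (probably over the ~5MB quota): ' + e);
        }
    };
    reader.readAsDataURL(file);
}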
Yes, this is possible. You can insert whatever information about the file you want into LocalStorage, provided you serialize it to one of the primitive types supported. You can also serialize the whole file into LocalStorage and retrieve it later if you want, but there are limitations on the size of the file depending on the browser.
The following shows how to achieve this using two different approaches:
(function () {
    // localStorage with image
    var storageFiles = JSON.parse(localStorage.getItem("storageFiles")) || {},
        elephant = document.getElementById("elephant"),
        storageFilesDate = storageFiles.date,
        date = new Date(),
        todaysDate = (date.getMonth() + 1).toString() + date.getDate().toString();

    // Compare date and create localStorage if it's not existing/too old
    if (typeof storageFilesDate === "undefined" || storageFilesDate < todaysDate) {
        // Take action when the image has loaded
        elephant.addEventListener("load", function () {
            var imgCanvas = document.createElement("canvas"),
                imgContext = imgCanvas.getContext("2d");
            // Make sure canvas is as big as the picture
            imgCanvas.width = elephant.width;
            imgCanvas.height = elephant.height;
            // Draw image into canvas element
            imgContext.drawImage(elephant, 0, 0, elephant.width, elephant.height);
            // Save image as a data URL
            storageFiles.elephant = imgCanvas.toDataURL("image/png");
            // Set date for localStorage
            storageFiles.date = todaysDate;
            // Save as JSON in localStorage
            try {
                localStorage.setItem("storageFiles", JSON.stringify(storageFiles));
            }
            catch (e) {
                console.log("Storage failed: " + e);
            }
        }, false);
        // Set initial image src
        elephant.setAttribute("src", "elephant.png");
    }
    else {
        // Use image from localStorage
        elephant.setAttribute("src", storageFiles.elephant);
    }

    // Getting a file through XMLHttpRequest as an arraybuffer and creating a Blob
    var rhinoStorage = localStorage.getItem("rhino"),
        rhino = document.getElementById("rhino");
    if (rhinoStorage) {
        // Reuse existing Data URL from localStorage
        rhino.setAttribute("src", rhinoStorage);
    }
    else {
        // Create XHR, BlobBuilder and FileReader objects
        var xhr = new XMLHttpRequest(),
            blob,
            fileReader = new FileReader();
        xhr.open("GET", "rhino.png", true);
        // Set the responseType to arraybuffer. "blob" is an option too, rendering BlobBuilder unnecessary, but the support for "blob" is not widespread enough yet
        xhr.responseType = "arraybuffer";
        xhr.addEventListener("load", function () {
            if (xhr.status === 200) {
                // Create a blob from the response
                blob = new Blob([xhr.response], {type: "image/png"});
                // onload needed since Google Chrome doesn't support addEventListener for FileReader
                fileReader.onload = function (evt) {
                    // Read out file contents as a Data URL
                    var result = evt.target.result;
                    // Set image src to Data URL
                    rhino.setAttribute("src", result);
                    // Store Data URL in localStorage
                    try {
                        localStorage.setItem("rhino", result);
                    }
                    catch (e) {
                        console.log("Storage failed: " + e);
                    }
                };
                // Load blob as Data URL
                fileReader.readAsDataURL(blob);
            }
        }, false);
        // Send XHR
        xhr.send();
    }
})();
