HTML 5 File API 0x80004003 - javascript

Hi, I am using the JavaScript HTML5 File API to handle file uploads to my server.
I am getting the following error in Aurora (Firefox bleeding-edge builds):
NS_ERROR_INVALID_POINTER: Component returned failure code: 0x80004003 (NS_ERROR_INVALID_POINTER) [nsIDOMFileReader.readAsBinaryString]
function readBlob(opt_startByte, opt_stopByte, file, partNo) {
    var start = parseInt(opt_startByte);
    var stop = parseInt(opt_stopByte);
    var reader = new FileReader();
    // MAX_READ is a chunk-size constant defined elsewhere
    var totalParts = parseInt(file.size / MAX_READ);
    if ((file.size % MAX_READ) !== 0) {
        totalParts++;
    }
    // If we use onloadend, we need to check the readyState.
    reader.onloadend = function(evt) {
        if (evt.target.readyState == FileReader.DONE) { // DONE == 2
            var contents = evt.target.result;
            postFilePart(partNo, contents, totalParts, escape(file.name));
        }
    };
    if (file.webkitSlice) {
        var blob = file.webkitSlice(start, stop);
    } else if (file.mozSlice) {
        var blob = file.mozSlice(start, stop);
    }
    reader.readAsBinaryString(blob);
}
The error occurs at this line:
reader.readAsBinaryString(blob);
I have tried both mozSlice and slice:
if (file.mozSlice) {
    var blob = file.mozSlice(start, stop);
}
and it gave me the same results. It might not be the best idea to use the HTML5 API yet, as this may cause issues with other browsers as well.
Does anyone have a workaround to get the same functionality, or know how I can resolve this particular error?

Solved the issue: the calling method was re-running the reader code with incorrect parameters.
https://bugzilla.mozilla.org/show_bug.cgi?id=725289
Also, prefer slice() over mozSlice().
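For reference, a small feature-detection helper (an illustrative sketch, not part of the original answer) avoids calling a vendor-prefixed method that no longer exists:

// Sketch: pick whichever slice implementation the browser exposes.
function sliceFile(file, start, stop) {
    var slicer = file.slice || file.webkitSlice || file.mozSlice;
    return slicer.call(file, start, stop);
}

// Usage inside readBlob():
// var blob = sliceFile(file, start, stop);
// reader.readAsBinaryString(blob);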

Related

Migrate FileReader ReadAsBinaryString() to ReadAsArrayBuffer() or ReadAsText()

I have noticed that newer Mozilla Firefox builds return an allocation size overflow (on FileReader.readAsBinaryString()) when the file is bigger than roughly 200MB.
Here's some of my code under test for the client web browser:
function upload(fileInputId, fileIndex)
{
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsBinaryString(file);
    reader.onloadend = function(evt)
    {
        xhr = new XMLHttpRequest();
        xhr.open("POST", "upload.php", true);
        XMLHttpRequest.prototype.mySendAsBinary = function(text) {
            var data = new ArrayBuffer(text.length);
            var ui8a = new Uint8Array(data, 0);
            for (var i = 0; i < text.length; i++) {
                ui8a[i] = (text.charCodeAt(i) & 0xff);
            }
            if (typeof window.Blob == "function")
            {
                blob = new Blob([data]);
            } else {
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(data);
                blob = bb.getBlob();
            }
            this.send(blob);
        }
        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position / total) * 100);
        });
        xhr.onreadystatechange = function()
        {
            if (xhr.readyState == 4)
            {
                if (xhr.status == 200)
                {
                    console.log("Done");
                } else {
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
So I tried changing it to FileReader.readAsArrayBuffer(); the error no longer shows up, but the data is not the same (as it's not read as a binary string).
Does anyone have a solution to this problem? Is there any way we can upload a bigger file from JS to the web server in raw/string form other than the FileReader implementation?
I read in the Mozilla JS documentation:
This feature is non-standard and is not on a standards track. Do not
use it on production sites facing the Web: it will not work for every
user. There may also be large incompatibilities between
implementations and the behavior may change in the future. - Mozilla
If not readAsBinaryString, then how do I implement readAsArrayBuffer or readAsText?
To send Files to a web server, you simply don't need JS. HTML alone is well able to do this with the <form> element.
Now if you want to go through JS, e.g. to catch the different ProgressEvents, then you can send your File directly; there is no need to read it at all on your side.
To do this, you've got two (or three) solutions.
If your server is able to handle PUT requests, you can simply xhr.send(file);.
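For instance, a minimal sketch of that direct-PUT approach (assuming an endpoint such as your_server.page accepts raw PUT bodies) could look like this:

// Sketch: send the File object itself, no FileReader involved.
var fileInput = document.querySelector('input[type="file"]');
fileInput.onchange = function() {
    var file = this.files[0];
    if (!file) return;
    var xhr = new XMLHttpRequest();
    xhr.open('PUT', 'your_server.page'); // assumed endpoint
    xhr.upload.onprogress = function(e) {
        if (e.lengthComputable) {
            console.log(Math.round((e.loaded / e.total) * 100) + '%');
        }
    };
    xhr.onload = function() { console.log('upload finished', xhr.status); };
    xhr.send(file); // the browser streams the Blob for us
};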
Otherwise, you'd have to go through a FormData.
// if you really want to go the XHR way
document.forms[0].onsubmit = function handleSubmit(evt) {
    if (!window.FormData) { // old browser, let the <form> do its job
        return;
    }
    // now we handle the submit through js
    evt.preventDefault();
    var fD = new FormData(this);
    var xhr = new XMLHttpRequest();
    xhr.onprogress = function handleProgress(evt) {};
    xhr.onload = function handleLoad(evt) {};
    xhr.onerror = function handleError(evt) {};
    xhr.open(this.method, this.action);
    // xhr.send(fD); // won't work in StackSnippet
    log(fD, this.method, this.action); // so we just log its content
};

function log(formData, method, action) {
    console.log('would have sent');
    for (let [key, val] of formData.entries())
        console.log(key, val);
    console.log('through', method);
    console.log('to', action);
}
<!-- this in itself is enough -->
<form method="POST" action="your_server.page">
<input type="file" name="file_upload">
<input type="submit">
</form>
Now, you said in a comment that you can't upload Files bigger than 1GB to your server.
This limitation is only due to your server's configuration, so the best option, if you want to accept such big files, is to configure it correctly.
But if you really want to send your File in chunks, even then don't abandon the Blob interface.
Indeed, Blobs have a slice() method, so use it.
document.forms[0].onsubmit = function handleSubmit(evt) {
    evt.preventDefault();
    var file = this.elements[0].files[0];
    var processed = 0;
    if (file) {
        // var MAX_CHUNK_SIZE = Math.min(file.size, server_max_size);
        // for demo we just split in 10 chunks
        var MAX_CHUNK_SIZE = file.size > 10 ? (file.size / 10) | 0 : 1;
        loadChunk(0);
    }
    function loadChunk(start) {
        var fD = new FormData();
        var sliced = file.slice(start, start + MAX_CHUNK_SIZE);
        processed += sliced.size; // only for demo
        fD.append('file_upload', sliced, file.name);
        fD.append('starting_index', start);
        if (start + MAX_CHUNK_SIZE >= file.size) {
            fD.append('last_chunk', true);
        }
        var xhr = new XMLHttpRequest();
        xhr.open('POST', 'your_server.page');
        xhr.onload = function onchunkposted(evt) {
            if (start + MAX_CHUNK_SIZE >= file.size) {
                console.log('All done. Original file size: %s, total of chunks sizes %s', file.size, processed);
                return;
            }
            loadChunk(start + MAX_CHUNK_SIZE);
        };
        // xhr.send(fD);
        log(fD);
        setTimeout(xhr.onload, 200); // fake XHR onload
    }
};

function log(formData, method, action) {
    console.log('would have sent');
    for (let [key, val] of formData.entries())
        console.log(key, val);
}
<form method="POST" action="your_server.page">
<input type="file" name="file_upload">
<input type="submit">
</form>
But you absolutely don't need to go through a FileReader for this operation.
Actually, the only case where it could make sense to use a FileReader here would be for some Android browsers that don't support passing a Blob into a FormData, even though they don't give a single clue about it.
So in this case, you'd have to set up your server to let you know the request was empty, and only then read the File as a data URI that you would send in place of the original File.
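A rough sketch of that fallback (illustrative only; detecting the "empty request" condition is server-specific, and the retry trigger is assumed):

// Sketch: resend a chunk as a data URI when the server reports an empty upload.
function resendAsDataURI(file, start, chunkSize) {
    var reader = new FileReader();
    reader.onload = function() {
        var fD = new FormData();
        fD.append('file_upload_datauri', reader.result); // a string, so old Android accepts it
        fD.append('starting_index', start);
        var xhr = new XMLHttpRequest();
        xhr.open('POST', 'your_server.page');
        xhr.send(fD);
    };
    reader.readAsDataURL(file.slice(start, start + chunkSize));
}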
After a long week of research and sleepless nights: you can't upload binary strings without breaking them, and base64 doesn't work for all files, only images; the journey from the client side to the server breaks the bytes being sent.
Kaiido's statement is correct:
To send Files to a web-server, you simply don't need js
But that doesn't answer my question. Using a plain XMLHttpRequest() can upload the file and track the progress as well. But still, it's not it. Direct upload, whether from the <form> or via XMLHttpRequest(), requires increasing the upload limit in the PHP settings. That method is not convenient for me. What if a client uploads a 4GB file? Then I need to increase the limit to 4GB. The next time a client uploads a 6GB file, I have to increase it to 6GB.
Using the slice() method makes sense for bigger files, as we can send them to the server part by part. But I am not using it yet this time.
Here are some of my tests that worked the way I want. I hope some expert can correct me if I am wrong.
My Upload.js
function upload(fileInputId, fileIndex)
{
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsArrayBuffer(file);
    reader.onloadend = function(evt)
    {
        xhr = new XMLHttpRequest();
        // base64_encode() is assumed to be a helper defined elsewhere (e.g. built on btoa)
        xhr.open("POST", "upload.php?name=" + base64_encode(file.name), true);
        XMLHttpRequest.prototype.mySendAsBinary = function(text) {
            // text is the ArrayBuffer produced by readAsArrayBuffer
            var ui8a = new Uint8Array(new Int8Array(text));
            if (typeof window.Blob == "function")
            {
                blob = new Blob([ui8a]);
            } else {
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(ui8a);
                blob = bb.getBlob();
            }
            this.send(blob);
        }
        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position / total) * 100);
            console.log(percentage);
        });
        xhr.onreadystatechange = function()
        {
            if (xhr.readyState == 4)
            {
                if (xhr.status == 200)
                {
                    console.log("Done");
                } else {
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
Below is how the PHP server listens to the ArrayBuffer from JS:
if(isset($_GET["name"])){
$name = base64_decode($_GET["name"]);
$loc = $name;
$inputHandler = fopen('php://input', "r");
$fileHandler = fopen($loc, "w+");
while(true) {
//$buffer = fgets($inputHandler, 1024);
$buffer = fread($inputHandler, 1000000);
if (strlen($buffer) == 0) {
fclose($inputHandler);
fclose($fileHandler);
return true;
}
//$b = base64_encode($buffer);
fwrite($fileHandler, $buffer);
}
}
The above method works well. The FileReader reads the file as an ArrayBuffer, which is then uploaded to the server. For me, migrating from readAsBinaryString() to readAsArrayBuffer() is important, and readAsArrayBuffer() performs better than readAsBinaryString().
Here are some reasons why some developers rely on the FileReader API:
Streaming. Using this method, the file is streamed, so we can avoid adjusting the PHP settings multiple times.
Easy encryption. As the file is sent as an ArrayBuffer, it is easy for the developer to encrypt the file while the upload is in progress.
This method also supports uploading any type of file. I have done some tests and found the readAsArrayBuffer() method to be faster than readAsBinaryString() and direct form upload. You may try it.
Security notice
The above code is only test code; to use it in production, you have to consider sending the data (GET or POST) over HTTPS.

Javascript FileReader doesn't fire events on large files

I'm trying to play video files that are on the client's computer, in the client's browser. The thing is, it's pretty difficult to get the absolute path so I can put it in the video's src or the object's data attribute. Eventually I found the FileReader object, and it works pretty well on small files. However, it doesn't fire the onload event when reading large files (200MB+ so far). No error: FileReader.onerror doesn't fire anything, try/catch doesn't help, and the developer console doesn't show anything.
I think it has something to do with a max file size in each browser's config, but I can't find a way to configure that. Help?
Here's the code:
function onclick()
{
    var file_dialog = document.getElementById("file_dialog");
    var path_dialog = document.getElementById("path_dialog");
    var video_player = document.getElementById("video_player");
    var begin_video = document.getElementById("begin_video");
    var reading_progress = document.getElementById("reading_progress");
    file_dialog.onchange = function ()
    {
        begin_video.disabled = file_dialog.files.length == 0 || path_dialog.files.length == 0;
    };
    path_dialog.onchange = file_dialog.onchange;
    begin_video.onclick = function ()
    {
        begin_video.disabled = true;
        var reader = new FileReader();
        reader.onload = function (e)
        {
            video_player.src = e.target.result;
            begin_video.disabled = false;
        };
        reader.onprogress = function (e)
        {
            reading_progress.textContent = "Reading... " + (Math.floor(e.loaded / e.total * 10000) / 100) + "%";
        };
        reader.onloadend = function (e)
        {
            if (e.target.error != null)
                reading_progress.textContent = e.target.error.code;
            else
                reading_progress.textContent = "FINISHED!!!";
        };
        reader.onerror = alert;
        reader.readAsDataURL(file_dialog.files[0]);
        var reader2 = new FileReader();
        reader2.onload = function (e)
        {
        };
    };
}
There is no such browser config setting for this.
I've also worked with FileReader and large files (up to 50 MB), and the browsers behave very differently:
Chrome => did well and was the most "responsive"
Firefox => did not do as well as Chrome, with high memory consumption, but it worked
IE => worked as long as the file was below 15 MB; above that the browser just didn't process the file - no feedback, it didn't fire any event
Maybe it's a memory issue - I tested the same files on a different machine with less memory, and IE refused to work at even 5 MB.
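As an aside (not part of the original answer), for simply playing a local video file a common way to avoid reading it into memory at all is to hand the browser an object URL instead of a data URI. A rough sketch, reusing the element variables from the question's snippet:

// Sketch: let the browser stream the file itself instead of going through FileReader.
begin_video.onclick = function () {
    var file = file_dialog.files[0];
    if (!file) return;
    var url = URL.createObjectURL(file); // no size penalty of a giant data URI
    video_player.src = url;
    video_player.onended = function () {
        URL.revokeObjectURL(url); // release the reference when done
    };
};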

javascript to read file selects a file but doesn't read it

My script selects a file... but doesn't read it. I've been banging my head against it but can't make it work. It's part of my studies; I'm a greenhorn and I'm lost.
function readBlob() {
    var files = document.getElementById('files').files;
    if (!files.length) {
        alert('Please select a file!');
        return;
        var file = files[0];
        var start = 0;
        var stop = file.size;
        var reader = new FileReader();
        if (file.webkitSlice) {
            var blob = file.webkitSlice(start, stop);
            //Creates new blob if using google chrome
        } else if (file.mozSlice) {
            var blob = file.mozSlice(start, stop);
            //Creates new blob if using mozilla firefox
        }
        //read the contents of the file in as text into the blob
        reader.readAsText(blob);
        reader.onloadend = function(evt) {
            if (evt.target.readyState == FileReader.DONE) {
                document.getElementById('byte_content').textContent =
                    evt.target.result;
            }
        };
    }
}
Seems like a simple syntax error to me, but maybe it was just an error when pasting it into Stack Overflow. The entire thing (the slicing of the file, the insertion into the document, everything) is inside the if (!files.length) block. Therefore, the script only executes when there is no file (catching on to the problem yet? :), but it is actually meant to do the opposite. All of the important stuff is supposed to be outside of the if statement.
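For completeness, a corrected version might look like the sketch below (same element ids as in the question; the unprefixed slice() is tried first, falling back to the prefixed methods for old browsers):

function readBlob() {
    var files = document.getElementById('files').files;
    if (!files.length) {
        alert('Please select a file!');
        return; // guard clause only - everything else lives outside the if
    }
    var file = files[0];
    var reader = new FileReader();
    reader.onloadend = function(evt) {
        if (evt.target.readyState == FileReader.DONE) {
            document.getElementById('byte_content').textContent = evt.target.result;
        }
    };
    var slicer = file.slice || file.webkitSlice || file.mozSlice;
    var blob = slicer.call(file, 0, file.size);
    reader.readAsText(blob);
}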

How to change emscripten browser input method from window.prompt to something more sensible?

I have a C++ function which, once called, consumes input from stdin. Exporting this function to JavaScript using Emscripten causes calls to window.prompt.
Interacting with the browser prompt is a really tedious task. First of all, you can paste only one line at a time. Secondly, the only way to indicate EOF is by pressing 'cancel'. Last but not least, the only way (in the case of my function) to make it stop asking the user for input via window.prompt is by checking the checkbox that prevents more prompts from popping up.
For me the best input method would be reading from some blob. I know I can hack library.js, but I see some problems:
Reading a blob is asynchronous.
To read a blob, you first have to open a file, which the user has to select first.
I don't really know how to prevent my function from reading this blob forever - there is no checkbox like with window.prompt, and I'm not sure spotting EOF will stop it if it didn't in the window.prompt case (only checking the checkbox helped).
The best solution would be some kind of callback, but I would like to see some hints from more experienced users.
A way would be to use the Emscripten Filesystem API, for example by calling FS.init in the Module preRun function, passing a custom function as the standard input.
var Module = {
    preRun: function() {
        function stdin() {
            // Return ASCII code of character, or null if no input
        }
        var stdout = null; // Keep as default
        var stderr = null; // Keep as default
        FS.init(stdin, stdout, stderr);
    }
};
The function is quite low-level: it must deal with one character at a time. To read some data from a blob, you could do something like:
var data = new Int8Array([1, 2, 3, 4, 5]);
var blob = new Blob([data], {type: 'application/octet-binary'});
var reader = new FileReader();
var result;
reader.addEventListener("loadend", function() {
    result = new Int8Array(reader.result);
});
reader.readAsArrayBuffer(blob);

var i = 0;
var Module = {
    preRun: function() {
        function stdin() {
            if (i < result.byteLength) {
                var code = result[i];
                ++i;
                return code;
            } else {
                return null;
            }
        }
        var stdout = null; // Keep as default
        var stderr = null; // Keep as default
        FS.init(stdin, stdout, stderr);
    }
};
Note (as you have hinted), due to the asynchronous nature of the reader, there could be a race condition: the reader must have loaded before you can expect the data at the standard input. You might need to implement some mechanism to avoid this in a real case. Depending on your exact requirements, you could make it so the Emscripten program doesn't actually call main() until you have the data:
var fileRead = false;
var initialised = false;
var result;

var array = new Int8Array([1, 2, 3, 4, 5]);
var blob = new Blob([array], {type: 'application/octet-binary'});
var reader = new FileReader();
reader.addEventListener("loadend", function() {
    result = new Int8Array(reader.result);
    fileRead = true;
    runIfCan();
});
reader.readAsArrayBuffer(blob);

var i = 0;
var Module = {
    preRun: function() {
        function stdin() {
            if (i < result.byteLength) {
                var code = result[i];
                ++i;
                return code;
            } else {
                return null;
            }
        }
        var stdout = null;
        var stderr = null;
        FS.init(stdin, stdout, stderr);
        initialised = true;
        runIfCan();
    },
    noInitialRun: true
};

function runIfCan() {
    if (fileRead && initialised) {
        // Module.run() doesn't seem to work here
        Module.callMain();
    }
}
Note: this is a version of my answer at Providing stdin to an emscripten HTML program? , but with focus on the standard input, and adding parts about passing data from a Blob.
From what I understand, you could try the following:
Implement selecting a file in JavaScript and access it via the JavaScript Blob interface.
Allocate some memory in Emscripten:
var buf = Module._malloc(blob.size);
Write the content of your Blob into the returned memory location from JavaScript (the Blob has to be read into an ArrayBuffer first, e.g. via FileReader, since a Uint8Array cannot be constructed from a Blob directly):
Module.HEAPU8.set(new Uint8Array(arrayBuffer), buf);
Pass that memory location to a second Emscripten-compiled function, which then processes the file content, and
Deallocate the allocated memory:
Module._free(buf);
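Putting those steps together, a minimal sketch might look like the following (the exported C function process_data(ptr, size) is a hypothetical name for illustration, and _malloc/_free as well as ccall must be exported from the Emscripten build):

// Sketch: feed a user-selected file to an Emscripten-compiled function.
document.querySelector('input[type="file"]').onchange = function() {
    var blob = this.files[0];
    if (!blob) return;
    var reader = new FileReader();
    reader.onload = function() {
        var bytes = new Uint8Array(reader.result);
        var buf = Module._malloc(bytes.length);   // allocate in the Emscripten heap
        Module.HEAPU8.set(bytes, buf);            // copy the file content into it
        // 'process_data' is a hypothetical exported C function: void process_data(uint8_t*, int)
        Module.ccall('process_data', null, ['number', 'number'], [buf, bytes.length]);
        Module._free(buf);                        // release the heap memory
    };
    reader.readAsArrayBuffer(blob);
};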
Best to read the wiki first.

Reading file contents on the client-side in javascript in various browsers

I'm attempting to provide a script-only solution for reading the contents of a file on a client machine through a browser.
I have a solution that works with Firefox and Internet Explorer. It's not pretty, but I'm only trying things out at the moment:
function getFileContents() {
    var fileForUpload = document.forms[0].fileForUpload;
    var fileName = fileForUpload.value;
    if (fileForUpload.files) {
        var fileContents = fileForUpload.files.item(0).getAsBinary();
        document.forms[0].fileContents.innerHTML = fileContents;
    } else {
        // try the IE method
        var fileContents = ieReadFile(fileName);
        document.forms[0].fileContents.innerHTML = fileContents;
    }
}

function ieReadFile(filename)
{
    try
    {
        var fso = new ActiveXObject("Scripting.FileSystemObject");
        var fh = fso.OpenTextFile(filename, 1);
        var contents = fh.ReadAll();
        fh.Close();
        return contents;
    }
    catch (Exception)
    {
        return "Cannot open file :(";
    }
}
I can call getFileContents() and it will write the contents into the fileContents text area.
Is there a way to do this in other browsers?
I'm most concerned with Safari and Chrome at the moment, but I'm open to suggestions for any other browser.
Edit: In response to the question, "Why do you want to do this?":
Basically, I want to hash the file contents together with a one-time password on the client side so I can send this information back as verification.
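For what it's worth, in modern browsers that kind of client-side hashing can be done with the Web Crypto API. A rough sketch, not from the original question, assuming an otp string supplied by the application:

// Sketch: SHA-256 over (file bytes + one-time password), assuming a modern browser.
async function hashFileWithOtp(file, otp) {
    const fileBytes = new Uint8Array(await file.arrayBuffer());
    const otpBytes = new TextEncoder().encode(otp);
    const combined = new Uint8Array(fileBytes.length + otpBytes.length);
    combined.set(fileBytes, 0);
    combined.set(otpBytes, fileBytes.length);
    const digest = await crypto.subtle.digest('SHA-256', combined);
    // return the digest as a hex string
    return Array.from(new Uint8Array(digest))
        .map(b => b.toString(16).padStart(2, '0'))
        .join('');
}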
Edited to add information about the File API
Since I originally wrote this answer, the File API has been proposed as a standard and implemented in most browsers (as of IE 10, which added support for FileReader API described here, though not yet the File API). The API is a bit more complicated than the older Mozilla API, as it is designed to support asynchronous reading of files, better support for binary files and decoding of different text encodings. There is some documentation available on the Mozilla Developer Network as well as various examples online. You would use it as follows:
var file = document.getElementById("fileForUpload").files[0];
if (file) {
    var reader = new FileReader();
    reader.readAsText(file, "UTF-8");
    reader.onload = function (evt) {
        document.getElementById("fileContents").innerHTML = evt.target.result;
    }
    reader.onerror = function (evt) {
        document.getElementById("fileContents").innerHTML = "error reading file";
    }
}
Original answer
There does not appear to be a way to do this in WebKit (thus, Safari and Chrome). The only keys that a File object has are fileName and fileSize. According to the commit message for the File and FileList support, these are inspired by Mozilla's File object, but they appear to support only a subset of the features.
If you would like to change this, you could always send a patch to the WebKit project. Another possibility would be to propose the Mozilla API for inclusion in HTML 5; the WHATWG mailing list is probably the best place to do that. If you do that, then it is much more likely that there will be a cross-browser way to do this, at least in a couple years time. Of course, submitting either a patch or a proposal for inclusion to HTML 5 does mean some work defending the idea, but the fact that Firefox already implements it gives you something to start with.
In order to read a file chosen by the user, using a file open dialog, you can use the <input type="file"> tag. You can find information on it from MSDN. When the file is chosen you can use the FileReader API to read the contents.
function onFileLoad(elementId, event) {
    document.getElementById(elementId).innerText = event.target.result;
}

function onChooseFile(event, onLoadFileHandler) {
    if (typeof window.FileReader !== 'function')
        throw ("The file API isn't supported on this browser.");
    let input = event.target;
    if (!input)
        throw ("The browser does not properly implement the event object");
    if (!input.files)
        throw ("This browser does not support the `files` property of the file input.");
    if (!input.files[0])
        return undefined;
    let file = input.files[0];
    let fr = new FileReader();
    fr.onload = onLoadFileHandler;
    fr.readAsText(file);
}
<input type='file' onchange='onChooseFile(event, onFileLoad.bind(this, "contents"))' />
<p id="contents"></p>
There's a modern native alternative: File implements Blob, so we can call Blob.text().
async function readText(event) {
    const file = event.target.files.item(0);
    const text = await file.text();
    document.getElementById("output").innerText = text;
}
<input type="file" onchange="readText(event)" />
<pre id="output"></pre>
Currently (September 2020) this is supported in Chrome and Firefox; for other browsers you need to load a polyfill, e.g. blob-polyfill.
Happy coding!
If you get an error on Internet Explorer, change the security settings to allow ActiveX.
var CallBackFunction = function(content) {
    alert(content);
}
ReadFileAllBrowsers(document.getElementById("file_upload"), CallBackFunction);

//Tested in Mozilla Firefox browser, Chrome
function ReadFileAllBrowsers(FileElement, CallBackFunction) {
    try {
        var file = FileElement.files[0];
        var contents_ = "";
        if (file) {
            var reader = new FileReader();
            reader.readAsText(file, "UTF-8");
            reader.onload = function(evt) {
                CallBackFunction(evt.target.result);
            }
            reader.onerror = function(evt) {
                alert("Error reading file");
            }
        }
    } catch (Exception) {
        var fall_back = ieReadFile(FileElement.value);
        if (fall_back != false) {
            CallBackFunction(fall_back);
        }
    }
}

///Reading files with Internet Explorer
function ieReadFile(filename) {
    try {
        var fso = new ActiveXObject("Scripting.FileSystemObject");
        var fh = fso.OpenTextFile(filename, 1);
        var contents = fh.ReadAll();
        fh.Close();
        return contents;
    } catch (Exception) {
        alert(Exception);
        return false;
    }
}
This works fine
function onClick(event) {
    filecontent = "";
    var myFile = event.files[0];
    var reader = new FileReader();
    reader.addEventListener('load', function (e) {
        filecontent = e.target.result;
    });
    reader.readAsBinaryString(myFile);
}
