Ok, so I found this method for uploading large files from a different question:
function sliceit(file) {
    var fr = new FileReader();
    var chunkSize = 1000000;
    var chunks = Math.ceil(file.size / chunkSize);
    var chunk = 0;
    var isstart = true;

    function loadNext() {
        var start, end;
        start = chunk * chunkSize;
        if (start > file.size)
            start = end + 1;
        end = start + (chunkSize - 1) >= file.size ? file.size : start + (chunkSize - 1);
        fr.onloadend = function(e) {
            while (!fr.result || fr.result == "" || fr.result == undefined) {
            }
            var fdata = fr.result;
            var xmlhttp, form, data;
            data = {};
            data.filename = document.getElementById("filename0").value;
            data.username = document.getElementById("username").value;
            data.password = document.getElementById("password").value;
            data.public = document.getElementById("public").value;
            if (isstart) {
                form = new FormData();
                form.append("filename", data.filename);
                form.append("username", data.username);
                form.append("password", data.password);
                form.append("public", data.public);
                form.append("filebytes", fdata);
                xmlhttp = jQuery.ajaxSettings.xhr();
                xmlhttp.open("POST", "../MakeEmptyFile.php", false);
                isstart = false;
            } else {
                form = new FormData();
                form.append("filename", data.filename);
                form.append("username", data.username);
                form.append("password", data.password);
                form.append("filebytes", fdata);
                xmlhttp = jQuery.ajaxSettings.xhr();
                xmlhttp.open("POST", "../AddTo.php", false);
            }
            xmlhttp.send(form);
            console.log(xmlhttp.responseText);
            if (++chunk <= chunks) {
                loadNext();
            } else {
                stdlog("Completed.");
            }
        };
        fr.readAsText(file.slice(start, end));
    }
    loadNext();
}
For some reason, every time I send the XMLHttpRequest, it shows that the file data was never actually sent. Is this because the file is too big, or something? Thanks!
It sounds like you are missing some basic concepts related to the File API. My answer here feeds off of the comments from your question.
First off, the proper way to upload a file in chunks is to use the slice method on the Blob or File object. The slice method will always return another Blob. This Blob represents the portion of the Blob or File you have carved out. You can then append that Blob to your FormData object. Your FormData object will be sent in a multipart-encoded POST request. The Blob will be represented inside of one of the multipart boundaries in the request as a form field, just like any other parameters you have appended to your FormData object.
Server-side, you would read this Blob just as you would read any other file when parsing a multipart-encoded request. The Blob will need to be temporarily stored server-side, along with any other Blobs that make up the complete file. Once you have received all parts, you will need to assemble them in the correct order server-side.
Your current method involves slicing the file into blobs, reading the entire contents of each blob, and then attaching the contents to your FormData object. This is redundant and inefficient.
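For illustration, here is a minimal sketch of that approach; the endpoint name, field names, and chunk size are placeholders, not from the question:

// Minimal sketch of chunked upload via Blob.slice() + FormData.
// "upload_chunk.php" and the field names are placeholder assumptions.
function uploadInChunks(file) {
    var chunkSize = 1000000; // ~1 MB per chunk
    var total = Math.ceil(file.size / chunkSize);

    function sendChunk(index) {
        if (index >= total) return; // done
        var start = index * chunkSize;
        var blob = file.slice(start, start + chunkSize); // a Blob; no reading needed
        var fd = new FormData();
        fd.append("filename", file.name);
        fd.append("chunkIndex", index);
        fd.append("chunkCount", total);
        fd.append("filebytes", blob); // sent as a multipart file part
        var xhr = new XMLHttpRequest();
        xhr.open("POST", "upload_chunk.php"); // async, unlike the code above
        xhr.onload = function() { sendChunk(index + 1); };
        xhr.send(fd);
    }
    sendChunk(0);
}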
Related
I've noticed that recent versions of Mozilla Firefox throw an "allocation size overflow" error from FileReader.readAsBinaryString() when the file is bigger than roughly 200 MB.
Here's some of my test code for the client web browser:
function upload(fileInputId, fileIndex) {
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsBinaryString(file);
    reader.onloadend = function(evt) {
        var xhr = new XMLHttpRequest();
        xhr.open("POST", "upload.php", true);
        XMLHttpRequest.prototype.mySendAsBinary = function(text) {
            var data = new ArrayBuffer(text.length);
            var ui8a = new Uint8Array(data, 0);
            for (var i = 0; i < text.length; i++) {
                ui8a[i] = (text.charCodeAt(i) & 0xff);
            }
            if (typeof window.Blob == "function") {
                blob = new Blob([data]);
            } else {
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(data);
                blob = bb.getBlob();
            }
            this.send(blob);
        };
        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position / total) * 100);
        });
        xhr.onreadystatechange = function() {
            if (xhr.readyState == 4) {
                if (xhr.status == 200) {
                    console.log("Done");
                } else {
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
So I tried changing it to FileReader.readAsArrayBuffer(); the error no longer shows up, but the data are not the same (since the file is no longer read as a binary string).
Does anyone have a solution to this problem? Is there any way to upload bigger files from JS to a web server as raw/string data other than through the FileReader implementation?
I read in the Mozilla JS documentation:
This feature is non-standard and is not on a standards track. Do not use it on production sites facing the Web: it will not work for every user. There may also be large incompatibilities between implementations and the behavior may change in the future. - Mozilla
If not readAsBinaryString, then how should readAsArrayBuffer or readAsText be implemented?
To send Files to a web server, you simply don't need JS. HTML alone is well able to do this with the <form> element.
Now, if you want to go through JS, e.g. to catch the different ProgressEvents, then you can send your File directly; there is no need to read it whatsoever on your side.
To do this, you've got two (or three) solutions.
If your server is able to handle PUT requests, you can simply xhr.send(file);.
Otherwise, you'd have to go through a FormData.
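For instance, the PUT variant could look like this (a sketch only; the /upload endpoint and the header convention are assumptions, not part of the original answer):

var xhr = new XMLHttpRequest();
xhr.open("PUT", "/upload"); // hypothetical endpoint that accepts raw bodies
xhr.setRequestHeader("X-File-Name", encodeURIComponent(file.name)); // assumed server-side convention
xhr.send(file); // the browser streams the File directly, no FileReader involved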
// if you really want to go the XHR way
document.forms[0].onsubmit = function handleSubmit(evt) {
    if (!window.FormData) { // old browsers use the <form>
        return;
    }
    // now we handle the submit through js
    evt.preventDefault();
    var fD = new FormData(this);
    var xhr = new XMLHttpRequest();
    xhr.onprogress = function handleProgress(evt) {};
    xhr.onload = function handleLoad(evt) {};
    xhr.onerror = function handleError(evt) {};
    xhr.open(this.method, this.action);
    // xhr.send(fD); // won't work in StackSnippet
    log(fD, this.method, this.action); // so we just log its content
};

function log(formData, method, action) {
    console.log('would have sent');
    for (let [key, val] of formData.entries())
        console.log(key, val);
    console.log('through', method);
    console.log('to', action);
}
<!-- this in itself is enough -->
<form method="POST" action="your_server.page">
    <input type="file" name="file_upload">
    <input type="submit">
</form>
Now, you sent a comment saying that you can't upload Files bigger than 1GB to your server.
This limitation is only due to your server's config, so the best option, if you want to accept such big files, is to configure it correctly.
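For a PHP backend, for instance, that would typically mean raising directives along these lines in php.ini (example values only; adjust to your needs):

; php.ini - example values only
upload_max_filesize = 8G
post_max_size = 8G        ; must be >= upload_max_filesize
max_input_time = 300
max_execution_time = 300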
But if you really want to send your File in chunks, even then, don't leave the Blob interface.
Indeed Blobs have a slice() method, so use it.
document.forms[0].onsubmit = function handleSubmit(evt) {
    evt.preventDefault();
    var file = this.elements[0].files[0];
    var processed = 0;
    if (file) {
        // var MAX_CHUNK_SIZE = Math.min(file.size, server_max_size);
        // for demo we just split in 10 chunks
        var MAX_CHUNK_SIZE = file.size > 10 ? (file.size / 10) | 0 : 1;
        loadChunk(0);
    }
    function loadChunk(start) {
        var fD = new FormData();
        var sliced = file.slice(start, start + MAX_CHUNK_SIZE);
        processed += sliced.size; // only for demo
        fD.append('file_upload', sliced, file.name);
        fD.append('starting_index', start);
        if (start + MAX_CHUNK_SIZE >= file.size) {
            fD.append('last_chunk', true);
        }
        var xhr = new XMLHttpRequest();
        xhr.open('POST', 'your_server.page');
        xhr.onload = function onchunkposted(evt) {
            if (start + MAX_CHUNK_SIZE >= file.size) {
                console.log('All done. Original file size: %s, total of chunks sizes %s', file.size, processed);
                return;
            }
            loadChunk(start + MAX_CHUNK_SIZE);
        };
        // xhr.send(fD);
        log(fD);
        setTimeout(xhr.onload, 200); // fake XHR onload
    }
};

function log(formData, method, action) {
    console.log('would have sent');
    for (let [key, val] of formData.entries())
        console.log(key, val);
}
<form method="POST" action="your_server.page">
    <input type="file" name="file_upload">
    <input type="submit">
</form>
But you absolutely don't need to go through a FileReader for this operation.
Actually, the only case where it could make sense to use a FileReader here would be for some Android browsers that don't support passing a Blob into a FormData, even though they don't give a single clue about it.
So in this case, you'd have to set up your server to let you know the request was empty, and then only read the File as a dataURI that you would send in place of the original File.
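A rough sketch of that fallback, assuming a hypothetical file_upload_base64 field that the server knows to decode:

// Fallback sketch for browsers whose FormData silently drops Blobs:
// read the chunk as a data URI and send it as a plain text field instead.
function appendChunkAsDataURI(fD, chunk, callback) {
    var reader = new FileReader();
    reader.onload = function() {
        fD.append("file_upload_base64", reader.result); // "data:...;base64,xxxx"
        callback(fD);
    };
    reader.readAsDataURL(chunk);
}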
After a long week of research and sleepless nights: you can't upload binary strings without breaking them, and base64 doesn't work for all files, only images; the journey from the client side to the server breaks the bytes being sent.
Kaiido's statement is correct:
To send Files to a web server, you simply don't need JS
But that doesn't answer my question. A simple XMLHttpRequest() can upload the file and track its progress as well, but that's still not it. Direct upload, whether from the <form> or through XMLHttpRequest(), requires you to raise the upload limits in the PHP settings. That method is not convenient for me. What if the client uploads a 4 GB file? Then I need to raise the limit to 4 GB. The next time, a client uploads a 6 GB file, so I have to raise it to 6 GB.
Using the slice() method makes sense for bigger files, as we can send them part by part to the server. But I am not using it here yet.
Here are some of my tests that work the way I want. I hope some expert can correct me if I am wrong.
My Upload.js
function upload(fileInputId, fileIndex) {
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsArrayBuffer(file);
    reader.onloadend = function(evt) {
        var xhr = new XMLHttpRequest();
        // base64_encode() is assumed to be a helper defined elsewhere (e.g. a btoa() wrapper)
        xhr.open("POST", "upload.php?name=" + base64_encode(file.name), true);
        XMLHttpRequest.prototype.mySendAsBinary = function(text) {
            var ui8a = new Uint8Array(new Int8Array(text));
            if (typeof window.Blob == "function") {
                blob = new Blob([ui8a]);
            } else {
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(ui8a);
                blob = bb.getBlob();
            }
            this.send(blob);
        };
        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position / total) * 100);
            console.log(percentage);
        });
        xhr.onreadystatechange = function() {
            if (xhr.readyState == 4) {
                if (xhr.status == 200) {
                    console.log("Done");
                } else {
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
Below is how the PHP server listens for the ArrayBuffer from JS:
if (isset($_GET["name"])) {
    $name = base64_decode($_GET["name"]);
    $loc = $name;
    $inputHandler = fopen('php://input', "r");
    $fileHandler = fopen($loc, "w+");
    while (true) {
        //$buffer = fgets($inputHandler, 1024);
        $buffer = fread($inputHandler, 1000000);
        if (strlen($buffer) == 0) {
            fclose($inputHandler);
            fclose($fileHandler);
            return true;
        }
        //$b = base64_encode($buffer);
        fwrite($fileHandler, $buffer);
    }
}
The above method works well. The FileReader reads the file as an ArrayBuffer, which is then uploaded to the server. For me, migrating from readAsBinaryString() to readAsArrayBuffer() is important, and readAsArrayBuffer() performs better than readAsBinaryString().
Here are some reasons why some developers rely on the FileReader API:
Streaming. With this method, the file is streamed, so we avoid raising the PHP limits over and over.
Easy encryption. Since the file is sent as an ArrayBuffer, it is easy for the developer to encrypt the file while the upload is in progress.
This method also supports uploading any type of file. In my tests, readAsArrayBuffer() was faster than readAsBinaryString() and direct form upload. You may try it.
Security notice
The above code is test code only; to use it in production, whether you send the data via GET or POST, make sure you do so over HTTPS.
I'm testing the SAP Leonardo Image Feature Extraction API (https://sandbox.api.sap.com/ml/featureextraction/inference_sync). I have the base64 string of the image and I want to transform it into a file object, zip it, and then send the zipped image file to this API using XMLHttpRequest. But the response text is "Service requires a list of (zipped) images".
My HTTP request headers and parameters are attached as screenshots. Although the parameters look like garbled text, the zipped file itself is created successfully.
Everything seems to be fine. However, the request fails with status 400 and the response text quoted above.
My JavaScript code is shown below. What is wrong? It's driving me crazy...
dataURItoBlob: function(dataURI, fileName) {
    // convert base64/URL-encoded data component to raw binary data held in a string
    var byteString;
    if (dataURI.split(',')[0].indexOf('base64') >= 0)
        byteString = atob(dataURI.split(',')[1]);
    else
        byteString = unescape(dataURI.split(',')[1]);
    // separate out the mime component
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    // write the bytes of the string to a typed array
    var ia = new Uint8Array(byteString.length);
    for (var i = 0; i < byteString.length; i++) {
        ia[i] = byteString.charCodeAt(i);
    }
    var blob = new Blob([ia], {encoding: "UTF-8", type: mimeString});
    // A Blob() is almost a File() - it's just missing the two properties below, which we add
    blob.lastModifiedDate = new Date();
    blob.name = fileName + '.' + mimeString.split('/')[1];
    return blob;
},

onSubmit: function(oEvent) {
    var oImage = this.getView().byId('myImage');
    // oImage.getSrc() : 'data:image/png;base64,iVBORw0KGgo...'
    var imageFile = this.dataURItoBlob(oImage.getSrc(), 'myImage');
    var zip = new JSZip();
    zip.file(imageFile.name, imageFile);
    zip.generateAsync({
        type: "blob",
        compression: 'DEFLATE', // force a compression for this file
        compressionOptions: {
            level: 6,
        },
    }).then(function(content) {
        //saveAs(content, "hello.zip");
        // start the busy indicator
        var oBusyIndicator = new sap.m.BusyDialog();
        oBusyIndicator.open();
        var formData = new FormData();
        formData.append('files', content, 'myImage.zip');
        var xhr = new XMLHttpRequest();
        xhr.withCredentials = false;
        xhr.addEventListener("readystatechange", function() {
            if (this.readyState === this.DONE) {
                oBusyIndicator.close();
                //navigator.notification.alert(this.responseText);
                console.log(this.responseText);
            }
        });
        // setting request method
        // API endpoint for API sandbox
        // Destination '/SANDBOX_API' in HCP is configured as 'https://sandbox.api.sap.com'
        var api = "/SANDBOX_API/ml/featureextraction/inference_sync";
        xhr.open("POST", api);
        // adding request headers
        xhr.setRequestHeader("Content-Type", "multipart/form-data");
        xhr.setRequestHeader("Accept", "application/json");
        // API Key for API Sandbox
        xhr.setRequestHeader("APIKey", "yQd5Oy785NkAIob6g1eNwctBg4m1LGQS");
        // sending request
        xhr.send(formData);
    });
},
I fixed this issue myself. I'm posting my solution for others' information. It's very easy: only the line below needs to be removed before sending the request. I have no idea why. Please comment if you know the reason. Thanks in advance!
xhr.setRequestHeader("Content-Type", "multipart/form-data");
I need to modify existing frontend (Angular) code that involves uploading files to a server. Now the files need to be encrypted before being uploaded.
The current approach uses FormData to append a number of files and send them in a single request as shown below:
function uploadFiles(wrappers) {
    var data = new FormData();
    // Add each file
    for (var i = 0; i < wrappers.length; i++) {
        var wrapper = wrappers[i];
        var file = wrapper.file;
        data.append('file_' + i, file);
    }
    $http.post(uri, data, requestCfg).then(
    /*...*
I have been using Forge in other projects, but never in this sort of context, and I don't really see how to encrypt files on the fly and still append them as FormData contents.
Forge provides an easy API:
var key = forge.random.getBytesSync(16);
var iv = forge.random.getBytesSync(8);
// encrypt some bytes
var cipher = forge.rc2.createEncryptionCipher(key);
cipher.start(iv);
cipher.update(forge.util.createBuffer(someBytes));
cipher.finish();
var encrypted = cipher.output;
The backend receives files using Formidable, and all the file handling is already wired up. I would thus like to stick to the existing front-end logic and simply insert the encryption logic. Note that it's not the entire FormData that must be encrypted... I haven't found a good lead yet on how to approach this.
Suggestions are very welcome!
Ok, I found a solution and added the decrypt code as well. This adds a layer of async code.
function appendFile(aFile, idx) {
    // Encrypt if a key was provided for this protocol test
    if (!key) {
        data.append('dicomfile_' + idx, aFile);
        appendedCount++;
        onFileAppended();
    } else {
        var reader = new FileReader();
        reader.onload = function() {
            // 1. Read bytes
            var arrayBuffer = reader.result;
            var bytes = new Uint8Array(arrayBuffer); // byte array aka uint8
            // 2. Encrypt
            var cipher = forge.cipher.createCipher('AES-CBC', key);
            cipher.start({iv: iv});
            cipher.update(forge.util.createBuffer(bytes));
            cipher.finish();
            // 3. To blob (file extends blob)
            var encryptedByteCharacters = cipher.output.getBytes(); // similar to an atob(b64) output
            // var asB64 = forge.util.encode64(encryptedBytes);
            // var encryptedByteCharacters = atob(asB64);
            // Convert to Blob object
            var blob = byteCharsToBlob(encryptedByteCharacters, "application/octet-stream", 512);
            // 4. Append blob
            data.append('dicomfile_' + idx, blob, aFile.name);
            // Decrypt for the sake of testing
            if (true) {
                var fileReader = new FileReader();
                fileReader.onload = function() {
                    arrayBuffer = this.result;
                    var bytez = new Uint8Array(arrayBuffer);
                    var decipher = forge.cipher.createDecipher('AES-CBC', key);
                    decipher.start({iv: iv});
                    decipher.update(forge.util.createBuffer(bytez));
                    decipher.finish();
                    var decryptedByteCharacters = decipher.output.getBytes();
                    var truz = bytes === decryptedByteCharacters; // demo check (note: compares a Uint8Array to a binary string)
                    var blob = byteCharsToBlob(decryptedByteCharacters, "application/octet-stream", 512);
                    data.append('decrypted_' + idx, blob, aFile.name + '.decrypted');
                    appendedCount++;
                    onFileAppended();
                };
                fileReader.readAsArrayBuffer(blob);
            } else {
                // z. Resume processing
                appendedCount++;
                onFileAppended();
            }
        };
        // Read file
        reader.readAsArrayBuffer(aFile);
    }
}

function onFileAppended() {
    // Only proceed when all files were appended and optionally encrypted (async)
    if (appendedCount !== wrappers.length) return;
    /* resume processing, upload or do whatever */
}
I'm testing out AJAX file uploads. I want eventually to be able to do it with images, but right now I'm happy to get text files working. I'll put a snippet of my code below.
JS
var blob; // blob global variable
var xhr = new XMLHttpRequest(); // assumed: declared somewhere in the original snippet

// reads the file into the blob global var on change of the file input
function readBlob(evt) {
    var files = evt.target.files;
    if (!files.length) {
        alert("no file");
        return;
    }
    var file = files[0];
    var reader = new FileReader();
    reader.onloadend = function(evt) {
        if (evt.target.readyState == FileReader.DONE) {
            document.getElementById("jsReturnDiv").textContent = evt.target.result;
            blob = new Blob([evt.target.result], {type: 'text/plain'});
        }
    };
    reader.readAsBinaryString(file);
}

function post() {
    var i = 0;
    xhr.open("POST", "what.php", true);
    xhr.setRequestHeader("content-type", "text/plain");
    xhr.onreadystatechange = function() {
        if (xhr.status == 200 && xhr.readyState == 4) {
            callback(xhr.responseText);
        }
        log(i, xhr.responseText);
        i++;
    };
    xhr.send(blob);
}

function callback(text) { //HUEHUEHUE
    // jsVarDump: helper defined elsewhere in the original code
    document.getElementById("phpReturnDiv").innerHTML = text + "</br>___encoded</br>" + jsVarDump(blob); //+ "</br>___decoded</br>" + jsVarDump(decodeURIComponent(blob));
}

function log(index, text) {
    console.log("index:" + index + "_______________________\n" + text);
}
PHP
<?php
var_dump($_POST);
?>
The problem I get is that the output of the PHP is always:
array(0) { }
I was wondering if it is immediately obvious what I'm doing wrong. The file is interpreted as a binary string and fed into a Blob object with content type text/plain, then POSTed as text/plain.
Should I be using a different content-type header in my XHR object? Am I handling the blob incorrectly?
Thanks!
You'll have to read php://input to get the data from a POST with content type text/plain. $_POST is only populated for application/x-www-form-urlencoded and multipart/form-data requests:
$text = file_get_contents('php://input');
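Alternatively, if you'd rather have PHP populate $_FILES for you, a sketch of sending the Blob through a FormData instead of as a raw text/plain body (reusing the blob and callback from the snippet above, against the same what.php):

function postAsFormData() {
    var fd = new FormData();
    fd.append("upload", blob, "upload.txt"); // arrives in $_FILES['upload'], not $_POST
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "what.php", true);
    // no setRequestHeader: the browser sets multipart/form-data with the boundary itself
    xhr.onreadystatechange = function() {
        if (xhr.readyState == 4 && xhr.status == 200) {
            callback(xhr.responseText);
        }
    };
    xhr.send(fd);
}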
To make a long story short:
How to Asynchronously write an ArrayBuffer directly to file using nsIArrayBufferInputStream in Firefox extension ?
It seems that MDN does not have any documentation on nsIArrayBufferInputStream.
I know I can use nsIStringInputStream and convert the ArrayBuffer to a string, but this poses a big performance hit.
Also, converting an ArrayBuffer to a string using this code:
String.fromCharCode.apply(null, new Uint16Array(buf));
does not work if the buffer is 500 KB or bigger, so we must loop over it one character at a time:
for (let i = 0; i < buf16.length; i++) {
    s += String.fromCharCode(buf16[i]);
}
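A common middle ground (not from the original post) is to apply String.fromCharCode to fixed-size slices instead of single characters:

// Convert in ~32K-element slices to stay under the engine's argument-count limit.
function bufferToString(buf16) {
    var CHUNK = 0x8000;
    var s = "";
    for (let i = 0; i < buf16.length; i += CHUNK) {
        s += String.fromCharCode.apply(null, buf16.subarray(i, i + CHUNK));
    }
    return s;
}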
Or, I can use nsIBinaryOutputStream.writeByteArray but it cannot be used with NetUtil.asyncCopy (or can it?)
//this works ok, but is synchronous :-(
function writeBinFile(aFile, data) {
    Components.utils.import("resource://gre/modules/FileUtils.jsm");
    let nsFile = Components.Constructor("@mozilla.org/file/local;1", Ci.nsILocalFile, "initWithPath");
    if (typeof aFile === 'string') aFile = nsFile(aFile);
    var stream = FileUtils.openSafeFileOutputStream(aFile, FileUtils.MODE_WRONLY | FileUtils.MODE_CREATE);
    var binaryStream = Cc["@mozilla.org/binaryoutputstream;1"].createInstance(Ci.nsIBinaryOutputStream);
    binaryStream.setOutputStream(stream);
    binaryStream.writeByteArray(data, data.length);
    FileUtils.closeSafeFileOutputStream(stream);
}
And the long story is...
I have been trying to use nsIArrayBufferInputStream (http://dxr.mozilla.org/mozilla-central/source/netwerk/base/public/nsIArrayBufferInputStream.idl), but with no success. The code I tried:
function fileWrite(file, data, callback) {
    Cu.import("resource://gre/modules/FileUtils.jsm");
    Cu.import("resource://gre/modules/NetUtil.jsm");
    let nsFile = Components.Constructor("@mozilla.org/file/local;1", Ci.nsILocalFile, "initWithPath");
    if (typeof file == 'string') file = new nsFile(file);
    let ostream = FileUtils.openSafeFileOutputStream(file);
    let istream = Cc["@mozilla.org/io/arraybuffer-input-stream;1"].createInstance(Ci.nsIArrayBufferInputStream);
    istream.setData(data, 0, data.length);
    let bstream = Cc["@mozilla.org/binaryinputstream;1"].createInstance(Ci.nsIBinaryInputStream);
    bstream.setInputStream(istream);
    //NetUtil.asyncCopy(istream, ostream,
    NetUtil.asyncCopy(bstream, ostream,
        function(status) {
            if (callback) callback(Components.isSuccessCode(status));
        }
    );
}
The ArrayBuffer data param is the response from an XMLHttpRequest:
function getBinFile(url, dir) {
    let nsFile = Components.Constructor("@mozilla.org/file/local;1", Ci.nsILocalFile, "initWithPath");
    let oReq = new XMLHttpRequest();
    oReq.open("GET", url, true);
    oReq.responseType = "arraybuffer";
    oReq.onload = function(oEvent) {
        var arrayBuffer = oReq.response;
        if (arrayBuffer) {
            //let byteArray = new Uint8Array(arrayBuffer);
            let byteArray = arrayBuffer;
            dir = /\\$/.test(dir) ? dir : dir + '\\';
            let file = nsFile(dir + decodeURIComponent(url.split('/').pop()));
            fileWrite(file, byteArray);
        }
    };
    oReq.send(null);
}
calling like this:
getBinFile( 'http://....', 'c:\\demo\\');
A file is created but with no contents!
I'm answering myself in case anyone stumbles upon this question. With help from Josh Matthews (of Mozilla), I found the answer: use byteLength instead of length:
istream.setData(data, 0, data.byteLength);