JS / PHP ajax posting Blob - javascript

I'm testing out AJAX file uploads. I want to eventually be able to do it with images, but right now I'm happy to get text files working. I'll put a snippet of my code below.
JS
var blob; // creates blob global variable
// reads file into blob global var on change of the file input
function readBlob(evt) {
    var files = evt.target.files;
    if (!files.length) {
        alert("no file");
        return;
    }
    var file = files[0];
    var reader = new FileReader();
    reader.onloadend = function(evt) {
        if (evt.target.readyState == FileReader.DONE) {
            document.getElementById("jsReturnDiv").textContent = evt.target.result;
            blob = new Blob([evt.target.result], {type: 'text/plain'});
        }
    };
    reader.readAsBinaryString(file);
}
function post() {
    var i = 0;
    var xhr = new XMLHttpRequest(); // assumed: xhr was created elsewhere in the original snippet
    xhr.open("POST", "what.php", true);
    xhr.setRequestHeader("content-type", "text/plain");
    xhr.onreadystatechange = function() {
        if (xhr.status == 200 && xhr.readyState == 4) {
            callback(xhr.responseText);
        }
        log(i, xhr.responseText);
        i++;
    };
    xhr.send(blob);
}
function callback(text) { //HUEHUEHUE
    document.getElementById("phpReturnDiv").innerHTML = text + "<br>___encoded<br>" + jsVarDump(blob); //+ "<br>___decoded<br>" + jsVarDump(decodeURIComponent(blob));
}
function log(index, text) {
    console.log("index:" + index + "_______________________\n" + text);
}
PHP
<?php
var_dump($_POST);
?>
The problem is that the output of the PHP script is always:
array(0) { }
I was wondering if it's immediately obvious what I'm doing wrong. The file is read in as a binary string and fed into a Blob object with content type text/plain, then POSTed as text/plain.
Should I be using a different Content-Type header in my XHR object? Am I handling the blob incorrectly?
Thanks!

You'll have to read php://input to get the data from a POST with content type text/plain; $_POST is only populated for application/x-www-form-urlencoded and multipart/form-data requests:
$text = file_get_contents('php://input');
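Alternatively, if you'd rather have the data show up in the PHP superglobals without touching php://input, you can wrap the blob in a FormData, so the request goes out as multipart/form-data; the blob then arrives in $_FILES rather than $_POST. A minimal sketch, reusing the callback from the snippet above (the field name "upload" is illustrative):
function postAsForm() {
    var fd = new FormData();
    fd.append("upload", blob, "upload.txt"); // arbitrary field name and filename
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "what.php", true);
    // note: no setRequestHeader call; the browser sets
    // "multipart/form-data; boundary=..." by itself
    xhr.onreadystatechange = function() {
        if (xhr.readyState == 4 && xhr.status == 200) {
            callback(xhr.responseText);
        }
    };
    xhr.send(fd);
}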

Related

Migrate FileReader ReadAsBinaryString() to ReadAsArrayBuffer() or ReadAsText()

I've realized that newer versions of Mozilla Firefox throw an allocation size overflow (in FileReader.readAsBinaryString()) when the file is bigger than roughly 200MB.
Here's some of my test code for the client web browser:
function upload(fileInputId, fileIndex)
{
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsBinaryString(file);
    reader.onloadend = function(evt)
    {
        xhr = new XMLHttpRequest();
        xhr.open("POST", "upload.php", true);
        XMLHttpRequest.prototype.mySendAsBinary = function(text) {
            var data = new ArrayBuffer(text.length);
            var ui8a = new Uint8Array(data, 0);
            for (var i = 0; i < text.length; i++) {
                ui8a[i] = (text.charCodeAt(i) & 0xff);
            }
            if (typeof window.Blob == "function")
            {
                blob = new Blob([data]);
            } else {
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(data);
                blob = bb.getBlob();
            }
            this.send(blob);
        }
        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position / total) * 100);
        });
        xhr.onreadystatechange = function()
        {
            if (xhr.readyState == 4)
            {
                if (xhr.status == 200)
                {
                    console.log("Done");
                } else {
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
So I tried changing it to FileReader.readAsArrayBuffer(); the error no longer shows up, but the data are not the same (as they're not read as a binary string).
Does anyone have a solution to this problem? Is there any way to upload a bigger file from JS to the web server as raw data or a string, other than through the FileReader implementation?
I read in the Mozilla JS documentation:
This feature is non-standard and is not on a standards track. Do not
use it on production sites facing the Web: it will not work for every
user. There may also be large incompatibilities between
implementations and the behavior may change in the future. - Mozilla
If not readAsBinaryString, then how should readAsArrayBuffer or readAsText be implemented?
To send Files to a web-server, you simply don't need js. HTML alone is well able to do this with the <form> element.
Now if you want to go through js, e.g. to catch the different ProgressEvents, then you can send your File directly; no need to read it whatsoever on your side.
To do this, you've got two (or three) solutions.
If your server is able to handle PUT requests, you can simply xhr.send(file);.
Otherwise, you'd have to go through a FormData.
// if you really want to go the XHR way
document.forms[0].onsubmit = function handleSubmit(evt) {
    if (!window.FormData) { // old browsers use the <form>
        return;
    }
    // now we handle the submit through js
    evt.preventDefault();
    var fD = new FormData(this);
    var xhr = new XMLHttpRequest();
    xhr.onprogress = function handleProgress(evt) {};
    xhr.onload = function handleLoad(evt) {};
    xhr.onerror = function handleError(evt) {};
    xhr.open(this.method, this.action);
    // xhr.send(fD); // won't work in StackSnippet
    log(fD, this.method, this.action); // so we just log its content
};

function log(formData, method, action) {
    console.log('would have sent');
    for (let [key, val] of formData.entries())
        console.log(key, val);
    console.log('through', method);
    console.log('to', action);
}
<!-- this in itself is enough -->
<form method="POST" action="your_server.page">
<input type="file" name="file_upload">
<input type="submit">
</form>
Now, you sent a comment saying that you can't upload Files bigger than 1GB to your server.
This limitation is only due to your server's config, so the best option if you want to accept such big files is to configure it correctly.
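For a PHP backend like the ones in this thread, that usually means raising the upload-related limits; a sketch of the relevant php.ini directives (the values below are only examples, tune them to your needs):
; php.ini (example values)
upload_max_filesize = 4G
post_max_size = 4G
max_execution_time = 300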
But if you really want to send your File in chunks, even then don't move away from the Blob interface.
Indeed, Blobs have a slice() method, so use it.
document.forms[0].onsubmit = function handleSubmit(evt) {
    evt.preventDefault();
    var file = this.elements[0].files[0];
    var processed = 0;
    if (file) {
        // var MAX_CHUNK_SIZE = Math.min(file.size, server_max_size);
        // for the demo we just split into 10 chunks
        var MAX_CHUNK_SIZE = file.size > 10 ? (file.size / 10) | 0 : 1;
        loadChunk(0);
    }
    function loadChunk(start) {
        var fD = new FormData();
        var sliced = file.slice(start, start + MAX_CHUNK_SIZE);
        processed += sliced.size; // only for demo
        fD.append('file_upload', sliced, file.name);
        fD.append('starting_index', start);
        if (start + MAX_CHUNK_SIZE >= file.size) {
            fD.append('last_chunk', true);
        }
        var xhr = new XMLHttpRequest();
        xhr.open('POST', 'your_server.page');
        xhr.onload = function onchunkposted(evt) {
            if (start + MAX_CHUNK_SIZE >= file.size) {
                console.log('All done. Original file size: %s, total of chunks sizes %s', file.size, processed);
                return;
            }
            loadChunk(start + MAX_CHUNK_SIZE);
        };
        // xhr.send(fD);
        log(fD);
        setTimeout(xhr.onload, 200); // fake XHR onload
    }
};

function log(formData, method, action) {
    console.log('would have sent');
    for (let [key, val] of formData.entries())
        console.log(key, val);
}
<form method="POST" action="your_server.page">
<input type="file" name="file_upload">
<input type="submit">
</form>
But you absolutely don't need to go through a FileReader for this operation.
Actually, the only case where it could make sense to use a FileReader here would be for some Android browsers that don't support passing a Blob into a FormData, even though they don't give a single clue about it.
In that case, you'd have to set up your server to let you know the request was empty, and then re-read the File, this time as a data URI that you would send in place of the original File.
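A minimal sketch of that fallback, assuming your server has some way of reporting the empty upload (the endpoint and field names here are illustrative):
function sendAsDataURI(file) {
    var reader = new FileReader();
    reader.onload = function() {
        // reader.result is a 'data:<mime>;base64,...' string
        var fD = new FormData();
        fD.append('file_as_data_uri', reader.result);
        fD.append('file_name', file.name);
        var xhr = new XMLHttpRequest();
        xhr.open('POST', 'your_server.page');
        xhr.send(fD);
    };
    reader.readAsDataURL(file);
}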
After a long week of research and sleepless nights: you can't upload binary strings without corrupting them, and base64 doesn't work for all files, only images; somewhere on the journey from the client side to the server, the bytes being sent get broken.
Kaiido's statement is correct:
To send Files to a web-server, you simply don't need js
But that doesn't answer my question. A plain XMLHttpRequest() can upload the file and track its progress as well, but still, that's not it. A direct upload, whether from the <form> or via XMLHttpRequest(), requires increasing the upload limit in the PHP settings, which is not convenient for me. What if a client uploads a 4GB file? Then I need to raise the limit to 4GB. The next time a client uploads a 6GB file, I have to raise it to 6GB.
Using the slice() method makes sense for bigger files, as we can send them to the server part by part. But I'm not using it yet.
Here is some of my testing that works the way I want; I hope some expert can correct me if I'm wrong.
My Upload.js
function upload(fileInputId, fileIndex)
{
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsArrayBuffer(file);
    reader.onloadend = function(evt)
    {
        xhr = new XMLHttpRequest();
        // base64_encode() is a userland JS helper here (e.g. wrapping btoa()), not a built-in
        xhr.open("POST", "upload.php?name=" + base64_encode(file.name), true);
        XMLHttpRequest.prototype.mySendAsBinary = function(text) {
            var ui8a = new Uint8Array(new Int8Array(text));
            if (typeof window.Blob == "function")
            {
                blob = new Blob([ui8a]);
            } else {
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(ui8a);
                blob = bb.getBlob();
            }
            this.send(blob);
        }
        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position / total) * 100);
            console.log(percentage);
        });
        xhr.onreadystatechange = function()
        {
            if (xhr.readyState == 4)
            {
                if (xhr.status == 200)
                {
                    console.log("Done");
                } else {
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
Below is how the PHP server listens for the ArrayBuffer from JS:
if(isset($_GET["name"])){
$name = base64_decode($_GET["name"]);
$loc = $name;
$inputHandler = fopen('php://input', "r");
$fileHandler = fopen($loc, "w+");
while(true) {
//$buffer = fgets($inputHandler, 1024);
$buffer = fread($inputHandler, 1000000);
if (strlen($buffer) == 0) {
fclose($inputHandler);
fclose($fileHandler);
return true;
}
//$b = base64_encode($buffer);
fwrite($fileHandler, $buffer);
}
}
The above method works well. The FileReader reads the file as an ArrayBuffer, then uploads it to the server. For me, migrating from readAsBinaryString() to readAsArrayBuffer() is important, and readAsArrayBuffer() performs better than readAsBinaryString().
Here are some reasons why some developers rely on the FileReader API:
Streaming. Using this method, the file is streamed, so we can avoid setting the PHP limits multiple times.
Easy encryption. As the file is sent as an ArrayBuffer, it is easy for the developer to encrypt the file while the upload is in progress.
This method also supports uploading any type of file. In my tests, the readAsArrayBuffer() method was faster than readAsBinaryString() and a direct form upload. You may try it.
Security Notice
The above is test code only; to use it in production, you have to consider sending the data (GET or POST) over HTTPS.

Use the base64 preview of the binary data response (zip file) in angularjs

I always get this error when opening the downloaded zip file C:\Users\me\Downloads\test.zip: Unexpected end of archive.
My current code is:
var blob = new Blob([data], { // data here is the binary content
    type: 'octet/stream',
});
var zipUrl = window.URL.createObjectURL(blob);
var fileName = orderNo;
fileName += '.zip';
downloadFile(null, fileName, null, zipUrl, null); // just creates a hidden anchor tag and triggers the download
The response of the call is binary, I think (see the linked binary content). But the preview is base64 (see the linked base64 content), and that one is the correct one. The way I verified it is with this fiddle.
You can refer to the screenshot of the Network tab here.
I put the base64 content into this line: var sampleBytes = base64ToArrayBuffer(''); and the downloaded zip opens just fine.
Things I have tried so far.
Adding these headers to the GET call:
var headers = {
    Accept: 'application/octet-stream',
    responseType: 'blob',
};
But I get: Request header field responseType is not allowed by Access-Control-Allow-Headers in preflight response.
We're using an existing ajax.service.js in our AngularJS project.
From this answer
var blob = new Blob([yourBinaryDataAsAnArrayOrAsAString], {type: "application/octet-stream"});
var fileName = "myFileName.myExtension";
saveAs(blob, fileName);
There are other things I have tried that I haven't listed; I will edit the question once I find them again.
But here's where I'm at right now: the preview is the correct base64 of the binary file. Is it possible to use that instead of the binary? (If so, I won't need the other methods I've tested.) I tried some binary-to-base64 converters, but they don't work.
So I just went and ditched the ajax.service.js that we have, for this specific call.
I used the XHR snippet from this answer and just added the headers necessary for our call: tokens and auth stuff.
And I used this code snippet for the conversion.
The code looks like this:
fetchBlob(url, function (blob) {
    // Array buffer to Base64:
    var base64 = btoa(String.fromCharCode.apply(null, new Uint8Array(blob)));
    var blob = new Blob([base64ToArrayBuffer(base64)], {
        type: 'octet/stream',
    });
    var zipUrl = window.URL.createObjectURL(blob);
    var fileName = orderNo;
    fileName += ' Attachments ';
    fileName += moment().format('DD-MMM-YYYY');
    fileName += '.zip';
    downloadFile(null, fileName, null, zipUrl, null); // create a hidden anchor tag and trigger download
});
function fetchBlob(uri, callback) {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', uri, true);
    xhr.responseType = 'arraybuffer';
    var x = AjaxService.getAuthHeaders();
    xhr.setRequestHeader('auth_stuff', x['auth_stuff']);
    xhr.setRequestHeader('token_stuff', x['token_stuff']);
    xhr.setRequestHeader('Accept', 'application/octet-stream');
    xhr.onload = function (e) {
        if (this.status == 200) {
            var blob = this.response;
            if (callback) {
                callback(blob);
            }
        }
    };
    return xhr.send();
}
function base64ToArrayBuffer(base64) {
    var binaryString = window.atob(base64);
    var binaryLen = binaryString.length;
    var bytes = new Uint8Array(binaryLen);
    for (var i = 0; i < binaryLen; i++) {
        var ascii = binaryString.charCodeAt(i);
        bytes[i] = ascii;
    }
    return bytes;
}
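As a side note, the base64 round-trip above isn't strictly necessary: fetchBlob() already hands the callback an ArrayBuffer (despite the parameter name), and a Blob can be built from it directly. A sketch of the shorter path, reusing the same helpers:
fetchBlob(url, function (arrayBuffer) {
    // wrap the raw bytes directly; no base64 conversion needed
    var blob = new Blob([arrayBuffer], { type: 'application/octet-stream' });
    var zipUrl = window.URL.createObjectURL(blob);
    downloadFile(null, orderNo + '.zip', null, zipUrl, null); // same helper as above
});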

No file was sent to SAP Leonardo Image Feature Extraction API using FormData

I'm testing the SAP Leonardo Image Feature Extraction API (https://sandbox.api.sap.com/ml/featureextraction/inference_sync). I have the base64 string of the image, and I want to transform it into a file object, zip it, and send the zipped image file to this API using XMLHttpRequest. But the response text is "Service requires a list of (zipped) images".
I've attached my HTTP request headers and parameters in the screenshots below.
Although the parameters look garbled, the zipped file (download here) is created successfully.
If you cannot download the zipped file, please refer to the screenshot below.
Everything seems to be fine; however, the response text is as below, with status 400.
My JavaScript code is shown below. What is wrong? It's driving me crazy...
dataURItoBlob: function(dataURI, fileName) {
    // convert base64/URL-encoded data component to raw binary data held in a string
    var byteString;
    if (dataURI.split(',')[0].indexOf('base64') >= 0)
        byteString = atob(dataURI.split(',')[1]);
    else
        byteString = unescape(dataURI.split(',')[1]);
    // separate out the mime component
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    // write the bytes of the string to a typed array
    var ia = new Uint8Array(byteString.length);
    for (var i = 0; i < byteString.length; i++) {
        ia[i] = byteString.charCodeAt(i);
    }
    var blob = new Blob([ia], {encoding: "UTF-8", type: mimeString});
    // A Blob() is almost a File() - it's just missing the two properties below, which we add
    blob.lastModifiedDate = new Date();
    blob.name = fileName + '.' + mimeString.split('/')[1];
    return blob;
},

onSubmit: function(oEvent) {
    var oImage = this.getView().byId('myImage');
    // oImage.getSrc() : 'data:image/png;base64,iVBORw0KGgo...'
    var imageFile = this.dataURItoBlob(oImage.getSrc(), 'myImage');
    var zip = new JSZip();
    zip.file(imageFile.name, imageFile);
    zip.generateAsync({
        type: "blob",
        compression: 'DEFLATE', // force a compression for this file
        compressionOptions: {
            level: 6,
        },
    }).then(function(content) {
        //saveAs(content, "hello.zip");
        // start the busy indicator
        var oBusyIndicator = new sap.m.BusyDialog();
        oBusyIndicator.open();
        var formData = new FormData();
        formData.append('files', content, 'myImage.zip');
        var xhr = new XMLHttpRequest();
        xhr.withCredentials = false;
        xhr.addEventListener("readystatechange", function () {
            if (this.readyState === this.DONE) {
                oBusyIndicator.close();
                //navigator.notification.alert(this.responseText);
                console.log(this.responseText);
            }
        });
        // setting request method
        // API endpoint for the API sandbox
        // Destination '/SANDBOX_API' in HCP is configured as 'https://sandbox.api.sap.com'
        var api = "/SANDBOX_API/ml/featureextraction/inference_sync";
        xhr.open("POST", api);
        // adding request headers
        xhr.setRequestHeader("Content-Type", "multipart/form-data");
        xhr.setRequestHeader("Accept", "application/json");
        // API key for the API sandbox
        xhr.setRequestHeader("APIKey", "yQd5Oy785NkAIob6g1eNwctBg4m1LGQS");
        // sending request
        xhr.send(formData);
    });
},
I fixed this issue by myself and am posting my solution here for others' information. It's very easy: only the line below needs to be removed before sending the request. I have no idea why; please comment if you know the reason. Thanks in advance!
xhr.setRequestHeader("Content-Type", "multipart/form-data");
Best Regards,
Shelwin Wei

Why does AJAX output comes with wrong encoding?

I'm getting a file from a server with AJAX (Angular). The file is a simple XLSX document, sent like this:
ob_start();
$file = \PHPExcel_IOFactory::createWriter($xls, 'Excel2007');
$file->save('php://output');
$response->setContent(ob_get_clean());
$response->headers->replace(array(
    'Content-Type' => 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
    'Content-Disposition' => 'attachment;filename="file.xlsx"'
));
When I make the request from the frontend, I use an Accept header too. Then I save the file with angular-file-saver, using FileSaver.js and Blob.js.
But the received file is corrupt and I can't open it in Excel: its size is (for example) 12446 bytes, while Chrome DevTools' Network tab shows the response's Content-Length header as 7141 bytes.
How can I solve this problem?
UPD:
I'm sending the request like this:
$http.get(baseURL + '/' + entity + '/export/?' + condition + sort, {
    headers: {'Accept': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet; charset=utf-8'}
});
and downloading the file like this:
var data = new Blob([response.data], {type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=utf-8'});
FileSaver.saveAs(data, 'file.xlsx');
The way I got around the problem was to use plain JS AJAX instead of AngularJS. (There might be a problem with how AngularJS and jQuery handle binary responses.)
This should work:
var request = new XMLHttpRequest();
request.open('GET', 'http://yourserver/yourpath', true);
request.responseType = 'blob';
request.onload = function (e) {
    if (this.status === 200) {
        var blob = this.response;
        if (window.navigator.msSaveOrOpenBlob) {
            var fileNamePattern = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/;
            window.navigator.msSaveBlob(blob, fileNamePattern.exec(request.getResponseHeader("content-disposition"))[1]);
        } else {
            var downloadLink = window.document.createElement('a');
            var contentTypeHeader = request.getResponseHeader("Content-Type");
            var b = new Blob([blob], { type: contentTypeHeader });
            var blobUrl = window.URL.createObjectURL(b);
            downloadLink.href = blobUrl;
            var fileNamePattern = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/;
            downloadLink.download = fileNamePattern.exec(request.getResponseHeader("content-disposition"))[1];
            document.body.appendChild(downloadLink);
            downloadLink.click();
            document.body.removeChild(downloadLink);
            window.URL.revokeObjectURL(blobUrl); // revoke the object URL (the original passed the Blob here)
        }
    }
};
request.send();
Code is based on this and this.
FYI, I found that new Blob([response.data], ...) returns almost double the size of response.data when response.data is not returned as a blob but as text/plain or application/vnd.openxmlformats-officedocument.spreadsheetml.sheet (presumably because the string gets UTF-8-encoded, so every character above 0x7F takes two bytes). To get around that, you need to pass it an array of bytes instead:
var i, l, d, array;
d = this.result;
l = d.length;
array = new Uint8Array(l);
for (i = 0; i < l; i++) {
    array[i] = d.charCodeAt(i);
}
var b = new Blob([array], {type: 'application/octet-stream'});
window.location.href = URL.createObjectURL(b);
Code is from here.
Anyway, since the AJAX response is not correct when using AngularJS, you won't get a valid xlsx file this way; you need to go with vanilla JS.

Why is my file data being lost in transfer?

OK, so I found this method for uploading large files in a different question:
function sliceit(file)
{
    var fr = new FileReader();
    var chunkSize = 1000000;
    var chunks = Math.ceil(file.size / chunkSize);
    var chunk = 0;
    var isstart = true;
    function loadNext() {
        var start, end;
        start = chunk * chunkSize;
        if (start > file.size)
            start = end + 1;
        end = start + (chunkSize - 1) >= file.size ? file.size : start + (chunkSize - 1);
        fr.onloadend = function(e)
        {
            // note: this busy-wait is a no-op; onloadend already guarantees fr.result is set
            while (!fr.result || fr.result == "" || fr.result == undefined)
            {
            }
            var fdata = fr.result;
            var xmlhttp, form, data;
            data = {};
            data.filename = document.getElementById("filename0").value;
            data.username = document.getElementById("username").value;
            data.password = document.getElementById("password").value;
            data.public = document.getElementById("public").value;
            if (isstart)
            {
                form = new FormData();
                form.append("filename", data.filename);
                form.append("username", data.username);
                form.append("password", data.password);
                form.append("public", data.public);
                form.append("filebytes", fdata);
                xmlhttp = jQuery.ajaxSettings.xhr();
                xmlhttp.open("POST", "../MakeEmptyFile.php", false);
                isstart = false;
            }
            else
            {
                form = new FormData();
                form.append("filename", data.filename);
                form.append("username", data.username);
                form.append("password", data.password);
                form.append("filebytes", fdata);
                xmlhttp = jQuery.ajaxSettings.xhr();
                xmlhttp.open("POST", "../AddTo.php", false);
            }
            xmlhttp.send(form);
            console.log(xmlhttp.responseText);
            if (++chunk <= chunks)
            {
                loadNext();
            }
            else
            {
                stdlog("Completed.");
            }
        };
        fr.readAsText(file.slice(start, end));
    }
    loadNext();
}
For some reason, every time I send the XMLHttpRequest, it shows that the file data was never actually sent. Is this because it was too big, or something? Thanks!
It sounds like you are missing some basic concepts related to the File API. My answer here feeds off the comments on your question.
First off, the proper way to upload a file in chunks is to use the slice method on the Blob or File object. The slice method will always return another Blob. This Blob represents the portion of the Blob or File you have carved out. You can then append that Blob to your FormData object. Your FormData object will be sent in a multipart-encoded POST request. The Blob will be represented inside one of the multipart boundaries in the request as a form field, just like any other parameter you have appended to your FormData object.
Server-side, you would read this Blob just as you would read any other file when parsing a multipart-encoded request. This Blob will need to be stored temporarily server-side, along with any other Blobs that make up the complete file. Once you have received all of the parts, you will need to assemble them in the correct order server-side.
Your current method involves slicing the file into blobs, reading the entire contents of each blob, and then attaching the contents to your FormData object, which is redundant and inefficient.
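For reference, a minimal sketch of the approach described above; the URL and field names are illustrative, and the requests are asynchronous, unlike the synchronous ones in the question:
function uploadInChunks(file) {
    var chunkSize = 1000000;
    function sendChunk(start) {
        if (start >= file.size) return; // done
        var form = new FormData();
        // file.slice() returns a Blob; no FileReader needed
        form.append("filebytes", file.slice(start, start + chunkSize), file.name);
        form.append("offset", start);
        var xhr = new XMLHttpRequest();
        xhr.open("POST", "../AddTo.php", true); // async
        xhr.onload = function() { sendChunk(start + chunkSize); }; // next chunk after this one completes
        xhr.send(form);
    }
    sendChunk(0);
}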
