How to download and then upload the file? - javascript

Tried to use the following code, but it doesn't work properly:
// download the file first
var req = new XMLHttpRequest();
req.open('GET', url, false);
req.overrideMimeType('text/plain; charset=x-user-defined');
req.send(null);
if (req.status != 200) return '';
// upload the file
req.open("POST", "http://mysite.com/upload", false);
req.setRequestHeader("Content-Length", req.responseText.length);
req.sendAsBinary(req.responseText); // What should I pass here?
if (req.status != 200) return '';
return req.responseText;
sendAsBinary is a Firefox-only function.
Update: I've also tried to upload it as part of a form:
var response = req.responseText;
var formData = new FormData();
formData.append("file", response);
req.open("POST", "http://mysite.com/upload", false);
req.send(formData);
But the server still doesn't receive the full data.

In the end, I used the approach with a temporary file:
var downloadCompleted = false;
// download the file first
var persist = Components.classes["@mozilla.org/embedding/browser/nsWebBrowserPersist;1"]
.createInstance(Components.interfaces.nsIWebBrowserPersist);
// get OS temp folder
var file = Components.classes["@mozilla.org/file/directory_service;1"]
.getService(Components.interfaces.nsIProperties)
.get("TmpD", Components.interfaces.nsIFile);
file.append("temp.ext");
file.createUnique(Components.interfaces.nsIFile.NORMAL_FILE_TYPE, 0666);
var fURI = Services.io.newURI(url,null,null);
const nsIWBP = Components.interfaces.nsIWebBrowserPersist;
const flags = nsIWBP.PERSIST_FLAGS_REPLACE_EXISTING_FILES;
persist.persistFlags = flags | nsIWBP.PERSIST_FLAGS_FROM_CACHE;
persist.progressListener = {
onProgressChange: function(aWebProgress, aRequest, aCurSelfProgress, aMaxSelfProgress, aCurTotalProgress, aMaxTotalProgress) {
},
onStateChange: function(aWebProgress, aRequest, aStateFlags, aStatus) {
if (aStateFlags & Components.interfaces.nsIWebProgressListener.STATE_STOP) {
downloadCompleted = true; // file has been downloaded
}
}
}
persist.saveURI(fURI, null, null, null, "", file);
var thread = Components.classes["@mozilla.org/thread-manager;1"]
.getService(Components.interfaces.nsIThreadManager)
.currentThread;
while (!downloadCompleted) // emulate synchronous request, not recommended approach
thread.processNextEvent(true);
// upload the file
var stream = Components.classes["@mozilla.org/network/file-input-stream;1"]
.createInstance(Components.interfaces.nsIFileInputStream);
stream.init(file, 0x04 | 0x08, 0644, 0x04); // file is an nsIFile instance
// try to determine the MIME type of the file
var mimeType = "text/plain";
try {
var mimeService = Components.classes["@mozilla.org/mime;1"]
.getService(Components.interfaces.nsIMIMEService);
mimeType = mimeService.getTypeFromFile(file); // file is an nsIFile instance
}
catch(e) { /* just use text/plain */ }
var req = Components.classes["@mozilla.org/xmlextras/xmlhttprequest;1"]
.createInstance(Components.interfaces.nsIXMLHttpRequest);
req.open('POST', "http://mysite.com/upload", false);
req.setRequestHeader('Content-Type', mimeType);
req.send(stream);
// delete the file
file.remove(false);

You need to store the responseText in an intermediate variable before reusing the req object.
// download the file first
var req = new XMLHttpRequest();
req.open('GET', url, false);
req.overrideMimeType('text/plain; charset=x-user-defined');
req.send(null);
if (req.status != 200) return '';
var response = req.responseText;
// upload the file
req.open("POST", "http://mysite.com/upload", false);
req.setRequestHeader("Content-Length", response.length);
req.sendAsBinary(response);
if (req.status != 200) return '';
return req.responseText;
Update
Per the MDN page Using XMLHttpRequest, it looks like the above code won't work. The following is the proper way to get the binary response. In the end, you will have an array of unsigned integers which you could send back to the server and convert to binary. I think.
//req.responseType is only defined for FF6+
req.responseType = "arraybuffer";
req.send(null);
//req.response is for FF6+, req.mozResponseArrayBuffer is for FF < 6
var buffer = req.mozResponseArrayBuffer || req.response;
if (buffer) {
var byteArray = new Uint8Array(buffer);
}
Update 2
To submit the byteArray to a server, I would try something like the following code, which is untested and almost guaranteed not to work.
req.open("POST", "http://mysite.com/upload", false);
req.setRequestHeader("Content-Length", byteArray.length);
//if this doesn't work, try byteArray.buffer
//if byteArray.buffer works, try skipping 'var byteArray = new Uint8Array(buffer);' altogether and just sending the buffer directly
req.send(byteArray);
Update 3
Could Using XMLHttpRequest from JavaScript modules / XPCOM components have anything to do with your issue?

Migrate FileReader ReadAsBinaryString() to ReadAsArrayBuffer() or ReadAsText()

I've realized that newer versions of Mozilla Firefox throw an allocation size overflow (on FileReader.readAsBinaryString()) when the file is bigger than about 200 MB (something like that).
Here's some of my test code for the client web browser:
function upload(fileInputId, fileIndex)
{
var file = document.getElementById(fileInputId).files[fileIndex];
var blob;
var reader = new FileReader();
reader.readAsBinaryString(file);
reader.onloadend = function(evt)
{
xhr = new XMLHttpRequest();
xhr.open("POST", "upload.php", true);
XMLHttpRequest.prototype.mySendAsBinary = function(text){
var data = new ArrayBuffer(text.length);
var ui8a = new Uint8Array(data, 0);
for (var i = 0; i < text.length; i++){
ui8a[i] = (text.charCodeAt(i) & 0xff);
}
if(typeof window.Blob == "function")
{
blob = new Blob([data]);
}else{
var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
bb.append(data);
blob = bb.getBlob();
}
this.send(blob);
}
var eventSource = xhr.upload || xhr;
eventSource.addEventListener("progress", function(e) {
var position = e.position || e.loaded;
var total = e.totalSize || e.total;
var percentage = Math.round((position/total)*100);
});
xhr.onreadystatechange = function()
{
if(xhr.readyState == 4)
{
if(xhr.status == 200)
{
console.log("Done");
}else{
console.log("Fail");
}
}
};
xhr.mySendAsBinary(evt.target.result);
};
}
So I tried changing it to FileReader.readAsArrayBuffer(); the error no longer shows up, but the data are not the same (as it's not read as a binary string).
Does anyone have a solution to this problem? Is there any way to upload a bigger file from JS to a web server as raw/string data, other than through the FileReader implementation?
I read in the Mozilla JS documentation:
This feature is non-standard and is not on a standards track. Do not
use it on production sites facing the Web: it will not work for every
user. There may also be large incompatibilities between
implementations and the behavior may change in the future. - Mozilla
If not readAsBinaryString, then how should readAsArrayBuffer or readAsText be implemented?
To send Files to a web-server, you simply don't need js. HTML alone is well able to do this with the <form> element.
Now if you want to go through JS, e.g. to catch the different ProgressEvents, then you can send your File directly; there is no need to read it whatsoever on your side.
To do this, you've got two (or three) solutions.
If your server is able to handle PUT requests, you can simply xhr.send(file); (a minimal sketch of this follows below).
Otherwise, you'd have to go through a FormData.
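For the PUT case, here is a minimal, hedged sketch; 'your_server.page' is the same placeholder endpoint used in the demos below, and your server must accept a raw request body at that URL:
// minimal PUT sketch: the browser streams the File itself, no FileReader involved
var fileInput = document.querySelector('input[type="file"]');
var file = fileInput.files[0];
var xhr = new XMLHttpRequest();
xhr.open('PUT', 'your_server.page'); // placeholder endpoint
xhr.onload = function () { console.log('upload finished with status', xhr.status); };
xhr.onerror = function () { console.log('upload failed'); };
xhr.send(file); // send the File (a Blob) directly as the request body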
// if you really want to go the XHR way
document.forms[0].onsubmit = function handleSubmit(evt) {
if(!window.FormData) { // old browser use the <form>
return;
}
// now we handle the submit through js
evt.preventDefault();
var fD = new FormData(this);
var xhr = new XMLHttpRequest();
xhr.onprogress = function handleProgress(evt){};
xhr.onload = function handleLoad(evt){};
xhr.onerror = function handleError(evt){};
xhr.open(this.method, this.action);
// xhr.send(fD); // won't work in StackSnippet
log(fD, this.method, this.action); // so we just log its content
};
function log(formData, method, action) {
console.log('would have sent');
for(let [key, val] of formData.entries())
console.log(key, val);
console.log('through', method);
console.log('to', action);
}
<!-- this in itself is enough -->
<form method="POST" action="your_server.page">
<input type="file" name="file_upload">
<input type="submit">
</form>
Now, you sent a comment saying that you can't upload Files bigger than 1GB to your server.
This limitation is only due to your server's configuration, so the best option, if you want to accept such big files, is to configure it correctly.
But if you really want to send your File in chunks, even then, don't move away from the Blob interface.
Indeed Blobs have a slice() method, so use it.
document.forms[0].onsubmit = function handleSubmit(evt) {
evt.preventDefault();
var file = this.elements[0].files[0];
var processed = 0;
if(file) {
// var MAX_CHUNK_SIZE = Math.min(file.size, server_max_size);
// for demo we just split in 10 chunks
var MAX_CHUNK_SIZE = file.size > 10 ? (file.size / 10) | 0 : 1;
loadChunk(0);
}
function loadChunk(start) {
var fD = new FormData();
var sliced = file.slice(start, start+MAX_CHUNK_SIZE);
processed += sliced.size; // only for demo
fD.append('file_upload', sliced, file.name);
fD.append('starting_index', start);
if(start + MAX_CHUNK_SIZE >= file.size) {
fD.append('last_chunk', true);
}
var xhr = new XMLHttpRequest();
xhr.open('POST', 'your_server.page');
xhr.onload = function onchunkposted(evt) {
if(start + MAX_CHUNK_SIZE >= file.size) {
console.log('All done. Original file size: %s, total of chunks sizes %s', file.size, processed);
return;
}
loadChunk(start + MAX_CHUNK_SIZE);
};
// xhr.send(fD);
log(fD);
setTimeout(xhr.onload, 200); // fake XHR onload
}
};
function log(formData, method, action) {
console.log('would have sent');
for(let [key, val] of formData.entries())
console.log(key, val);
}
<form method="POST" action="your_server.page">
<input type="file" name="file_upload">
<input type="submit">
</form>
But you absolutely don't need to go through a FileReader for this operation.
Actually, the only case where it could make sense to use a FileReader here would be for some Android browsers that don't support passing a Blob into a FormData, even though they don't give a single clue about it.
So in this case, you'd have to set up your server to let you know the request was empty, and only then read the File as a data URI that you would send in place of the original File.
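For illustration only, such a fallback could look roughly like the sketch below; readAsDataURL is the standard FileReader method, while the endpoint and field names are placeholders:
// fallback sketch: read the File as a data: URI and send that string instead of the Blob
function sendAsDataURI(file, url) {
  var reader = new FileReader();
  reader.onload = function () {
    var fD = new FormData();
    fD.append('file_as_data_uri', reader.result); // a (potentially very large) string
    fD.append('file_name', file.name);
    var xhr = new XMLHttpRequest();
    xhr.open('POST', url); // url is a placeholder for your upload endpoint
    xhr.send(fD);
  };
  reader.readAsDataURL(file);
}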
After a long week of research and sleepless nights: you can't upload binary strings without breaking them, and base64 doesn't work for all files, only images; the journey from the client side to the server breaks the bytes being sent.
Kaiido's statement is correct:
To send Files to a web-server, you simply don't need js
But that doesn't answer my question. A plain XMLHttpRequest() can upload the file and track its progress as well, but still, that's not it. A direct upload, either from the <form> or using XMLHttpRequest(), requires increasing the upload limit in the PHP settings. That is not convenient for me. What if a client uploads a 4 GB file? Then I need to increase the limit to 4 GB. The next time a client uploads a 6 GB file, I have to increase it to 6 GB.
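(For reference, raising that limit means editing the server configuration each time; the directives below are illustrative values only, not taken from my setup.)
; php.ini - illustrative values only
upload_max_filesize = 4G
post_max_size = 4G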
Using the slice() method makes sense for bigger files, as we can send them to the server part by part, but I am not using it yet.
Here's one of my tests that worked the way I want. I hope some expert can correct me if I am wrong.
My Upload.js
function upload(fileInputId, fileIndex)
{
var file = document.getElementById(fileInputId).files[fileIndex];
var blob;
var reader = new FileReader();
reader.readAsArrayBuffer(file);
reader.onloadend = function(evt)
{
xhr = new XMLHttpRequest();
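// note: base64_encode() is assumed to be a helper defined elsewhere (e.g. a wrapper around window.btoa)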
xhr.open("POST", "upload.php?name=" + base64_encode(file.name), true);
XMLHttpRequest.prototype.mySendAsBinary = function(text){
var ui8a = new Uint8Array(new Int8Array(text));
if(typeof window.Blob == "function")
{
blob = new Blob([ui8a]);
}else{
var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
bb.append(ui8a);
blob = bb.getBlob();
}
this.send(blob);
}
var eventSource = xhr.upload || xhr;
eventSource.addEventListener("progress", function(e) {
var position = e.position || e.loaded;
var total = e.totalSize || e.total;
var percentage = Math.round((position/total)*100);
console.log(percentage);
});
xhr.onreadystatechange = function()
{
if(xhr.readyState == 4)
{
if(xhr.status == 200)
{
console.log("Done");
}else{
console.log("Fail");
}
}
};
xhr.mySendAsBinary(evt.target.result);
};
}
Below is how the PHP server listens for the ArrayBuffer from JS:
if(isset($_GET["name"])){
$name = base64_decode($_GET["name"]);
$loc = $name;
$inputHandler = fopen('php://input', "r");
$fileHandler = fopen($loc, "w+");
while(true) {
//$buffer = fgets($inputHandler, 1024);
$buffer = fread($inputHandler, 1000000);
if (strlen($buffer) == 0) {
fclose($inputHandler);
fclose($fileHandler);
return true;
}
//$b = base64_encode($buffer);
fwrite($fileHandler, $buffer);
}
}
The above method works well. The FileReader reads the file as an ArrayBuffer and then uploads it to the server. For me, migrating from readAsBinaryString() to readAsArrayBuffer() is important, and readAsArrayBuffer() also performs somewhat better than readAsBinaryString().
Here are some reasons why some developers rely on the FileReader API:
Streaming. Using this method, the file is streamed, so we can avoid changing the PHP settings multiple times.
Easy encryption. As the file is sent as an ArrayBuffer, it is easy for the developer to encrypt the file while the upload is in progress (a sketch follows below).
This method also supports uploading any type of file. I've done some tests, and I found that the readAsArrayBuffer() method is faster than readAsBinaryString() and direct form upload. You may try it.
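To illustrate that second point, here is a rough, untested sketch (not part of my upload code) of encrypting the ArrayBuffer with the standard Web Crypto API before sending it; the key handling and endpoint are placeholders:
// rough sketch: encrypt the ArrayBuffer with AES-GCM before uploading
async function encryptAndUpload(arrayBuffer, url) {
  // one-off key for demonstration; a real app would manage keys properly
  var key = await crypto.subtle.generateKey(
    { name: "AES-GCM", length: 256 }, true, ["encrypt", "decrypt"]);
  var iv = crypto.getRandomValues(new Uint8Array(12)); // 96-bit nonce
  var encrypted = await crypto.subtle.encrypt(
    { name: "AES-GCM", iv: iv }, key, arrayBuffer);
  var xhr = new XMLHttpRequest();
  xhr.open("POST", url, true); // url is a placeholder upload endpoint
  xhr.send(new Blob([iv, encrypted])); // prepend the IV so the server can decrypt
}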
Security Notice
The above code is test code only; to use it in production, you should consider sending the data via GET or POST over HTTPS.

How to instantiate a new File object in JavaScript

I'm having trouble instantiating a new File object in JavaScript.
Here's the general gist of what I'm trying to do: there is client-side code expecting a "file" type object. I need to access the file located on the server (game.smc), download it to the local machine, and feed it to the client-side code.
I did some research and found that creating a new blob object is the first step. But in the code below, the blob object remains null and never gets populated. Does the path in xhr.open need to be the entire URL? Maybe I'm missing an entire concept here, not sure.
var blob = null;
var xhr = new XMLHttpRequest();
xhr.open("GET", "/Roms/game.smc");
xhr.responseType = "blob";
xhr.onload = function()
{
blob = xhr.response;
}
xhr.send();
Once I can get the blob object populated, I can then do this to convert it to a file object:
function blobToFile(theBlob, fileName) {
theBlob.lastModifiedDate = new Date();
theBlob.name = fileName;
return theBlob;
}
This is what I ended up doing. It shows how to get the blob object as well as how to convert it to a File type.
function autoLoadGame(fileName) {
var gameLocation = '/Content/Roms/Snes/' + fileName;
var blob = null;
var xhr = new XMLHttpRequest();
xhr.open("GET", gameLocation, true);
xhr.onreadystatechange = function () {
if (xhr.readyState == XMLHttpRequest.DONE) {
var blob = xhr.response;
var file = new File([blob], fileName, { type: '', lastModified: Date.now() });
snes_readfile(file);
}
}
xhr.responseType = "blob";
xhr.send();
}

Why does AJAX output come with the wrong encoding?

I'm getting a file from a server with AJAX (Angular). The file is a simple XLSX document, sent like this:
ob_start();
$file = \PHPExcel_IOFactory::createWriter($xls, 'Excel2007');
$file->save('php://output');
$response->setContent(ob_get_clean());
$response->headers->replace(array(
'Content-Type' => 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'Content-Disposition' => 'attachment; filename="file.xlsx"'
));
When I make the request from the frontend, I use an Accept header too. Then I save the file with angular-file-saver, using FileSaver.js and Blob.js.
But the received file is corrupt and I can't open it in Excel: its size is (for example) 12446 bytes, but Chrome's DevTools Network tab shows the response's Content-Length header as 7141 bytes.
How can I solve this problem?
Update:
I'm sending a request like this:
$http.get(baseURL + '/' + entity + '/export/?' + condition + sort, {
headers: {'Accept': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet; charset=utf-8'}
});
and downloading the file like this:
var data = new Blob([response.data], {type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=utf-8'});
FileSaver.saveAs(data, 'file.xlsx');
The way I got around the problem was using plain JS AJAX instead of AngularJS. (There might be a problem with how AngularJS and jQuery handle binary responses.)
This should work:
var request = new XMLHttpRequest();
request.open('GET', 'http://yourserver/yourpath', true);
request.responseType = 'blob';
request.onload = function (e) {
if (this.status === 200) {
var blob = this.response;
if (window.navigator.msSaveOrOpenBlob) {
var fileNamePattern = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/;
window.navigator.msSaveBlob(blob, fileNamePattern.exec(request.getResponseHeader("content-disposition"))[1]);
} else {
var downloadLink = window.document.createElement('a');
var contentTypeHeader = request.getResponseHeader("Content-Type");
var b = new Blob([blob], { type: contentTypeHeader });
downloadLink.href = window.URL.createObjectURL(b);
var fileNamePattern = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/;
downloadLink.download = fileNamePattern.exec(request.getResponseHeader("content-disposition"))[1];
document.body.appendChild(downloadLink);
downloadLink.click();
document.body.removeChild(downloadLink);
window.URL.revokeObjectURL(downloadLink.href);
}
}
};
request.send();
Code is based on this and this.
FYI, I found that new Blob([response.data], ...) returns almost double the size of response.data when response.data is not returned as a blob but as text/plain or application/vnd.openxmlformats-officedocument.spreadsheetml.sheet. To get around it, you need to pass it an array of bytes instead:
var i, l, d, array;
d = this.result;
l = d.length;
array = new Uint8Array(l);
for (var i = 0; i < l; i++){
array[i] = d.charCodeAt(i);
}
var b = new Blob([array], {type: 'application/octet-stream'});
window.location.href = URL.createObjectURL(b);
Code is from here.
Anyway, since the AJAX response is not correct when using AngularJS, you won't get a valid xlsx file this way. You need to go with vanilla JS.

How to find out if XMLHttpRequest.send() worked

I am using XMLHttpRequest to send a file from JavaScript code to a Django view. I need to detect whether the file has been sent or whether some error occurred. I used jQuery to write the following JavaScript.
Ideally, I would like to show the user an error message saying that the file was not uploaded. Is there some way to do this in JavaScript?
I tried to do this by returning a success/failure message from the Django view, putting the success/failed message into JSON, and sending the serialized JSON back from the Django view. For this, I made xhr.open() non-asynchronous. I tried to print the XMLHttpRequest object's responseText. The console.log(xhr.responseText) shows
response= {"message": "success"}
What I am wondering is whether this is the proper way to do this. In many articles, I found the warning that
Using async=false is not recommended
So, is there any way to find out whether the file has been sent, while keeping xhr.open() asynchronous?
$(document).ready(function(){
$(document).on('change', '#fselect', function(e){
e.preventDefault();
sendFile();
});
});
function sendFile(){
var form = $('#fileform').get(0);
var formData = new FormData(form);
var file = $('#fselect').get(0).files[0];
var xhr = new XMLHttpRequest();
formData.append('myfile', file);
xhr.open('POST', 'uploadfile/', false);
xhr.send(formData);
console.log('response=',xhr.responseText);
}
My Django view extracts the file from the form data and writes it to a destination folder.
def store_uploaded_file(request):
    message = 'failed'
    to_return = {}
    if request.method == 'POST':
        if request.FILES.has_key('myfile'):
            file = request.FILES['myfile']
            with open('/uploadpath/%s' % file.name, 'wb+') as dest:
                for chunk in file.chunks():
                    dest.write(chunk)
            message = "success"
    to_return['message'] = message
    serialized = simplejson.dumps(to_return)
    if message == "success":
        return HttpResponse(serialized, mimetype="application/json")
    else:
        return HttpResponseServerError(serialized, mimetype="application/json")
EDIT:
I got this working with the help of @FabrícioMatté:
xhr.onreadystatechange=function(){
if (xhr.readyState==4 && xhr.status==200){
console.log('xhr.readyState=',xhr.readyState);
console.log('xhr.status=',xhr.status);
console.log('response=',xhr.responseText);
var data = $.parseJSON(xhr.responseText);
var uploadResult = data['message']
console.log('uploadResult=',uploadResult);
if (uploadResult=='failure'){
console.log('failed to upload file');
displayError('failed to upload');
}else if (uploadResult=='success'){
console.log('successfully uploaded file');
}
}
}
Something like the following code should do the job:
xmlhttp.onreadystatechange = function() {
if (xmlhttp.readyState === 4) {
var response = JSON.parse(xmlhttp.responseText);
if (xmlhttp.status === 200) {
console.log('successful');
} else {
console.log('failed');
}
}
}
XMLHttpRequest objects contain the status and readyState properties, which you can test in the xhr.onreadystatechange event to check if your request was successful.
XMLHttpRequest provides the ability to listen to various events that can occur while the request is being processed. This includes periodic progress notifications, error notifications, and so forth.
So:
function sendFile() {
var form = $('#fileform').get(0);
var formData = new FormData(form);
var file = $('#fselect').get(0).files[0];
var xhr = new XMLHttpRequest();
formData.append('myfile', file);
xhr.open('POST', 'uploadfile/', true); // keep the request asynchronous so the events below can fire
xhr.addEventListener("load", transferComplete);
xhr.addEventListener("error", transferFailed);
xhr.send(formData); // the request is not sent without this call
}
function transferComplete(evt) {
console.log("The transfer is complete.");
// Do something
}
function transferFailed(evt) {
console.log("An error occurred while transferring the file.");
// Do something
}
You can read more about Using XMLHttpRequest.

BlobBuilder ruins binary data

I have a problem with BlobBuilder (Chrome 11).
I try to obtain an image from the server with an XHR request, then save it to the local FS with BlobBuilder / FileWriter. Every example on the internet works with the text/plain MIME type, and those examples work fine. But when I try to write binary data obtained with XHR, the file size becomes about 1.5-2 times bigger than the original, and the file cannot be viewed in Picasa / Eye of GNOME.
var xhr = new XMLHttpRequest();
var photoOrigUrl = 'http://www.google.ru/images/nav_logo72.png';
xhr.open('GET', photoOrigUrl, true);
xhr.onreadystatechange = function() {
if (xhr.readyState == 4 && xhr.status == 200) {
var contentType = xhr.getResponseHeader('Content-type');
fsLink.root.getFile('nav_logo72.png', {'create': true}, function(fileEntry) {
fileEntry.createWriter(function(fileWriter) {
var BlobBuilderObj = new (window.BlobBuilder || window.WebKitBlobBuilder)();
BlobBuilderObj.append(xhr.responseText);
fileWriter.write(BlobBuilderObj.getBlob(contentType));
}, function(resultError) {
console.log('writing file to file system failed ( code ' + resultError.code + ')');
});
});
}
}
xhr.send();
fsLink exists; this is an extension.
The problem is that BlobBuilder.append(xhr.responseText) is treating its argument as a UTF-8 string, which is what XHR returns, rather than the binary data it really is. There are a couple of tricks to get the BlobBuilder to read it as binary data instead of string data:
var xhr = new XMLHttpRequest();
var photoOrigUrl = 'http://www.google.ru/images/nav_logo72.png';
xhr.open('GET', photoOrigUrl, true);
// CHANGE 1: This stops the browser from parsing the data as UTF-8:
xhr.overrideMimeType('text/plain; charset=x-user-defined');
xhr.onreadystatechange = function() {
if (xhr.readyState == 4 && xhr.status == 200) {
var contentType = xhr.getResponseHeader('Content-type');
fsLink.root.getFile('nav_logo72.png', {'create': true}, function(fileEntry) {
fileEntry.createWriter(function(fileWriter) {
// CHANGE 2: convert string object into a binary object
var byteArray = new Uint8Array(xhr.response.length);
for (var i = 0; i < xhr.response.length; i++) {
byteArray[i] = xhr.response.charCodeAt(i) & 0xff;
}
var BlobBuilderObj = new (window.BlobBuilder || window.WebKitBlobBuilder)();
// CHANGE 3: Pass the BlobBuilder an ArrayBuffer instead of a string
BlobBuilderObj.append(byteArray.buffer);
// CHANGE 4: not sure if it's needed, but keep only the necessary
// part of the Internet Media Type string
fileWriter.write(BlobBuilderObj.getBlob(contentType.split(";")[0]));
}, function(resultError) {
console.log('writing file to file system failed ( code ' + resultError.code + ')');
});
});
}
}
xhr.send();
This gave me a file with the same length as what xhr.getResponseHeader('Content-Length') suggests it should have been.
You can use xhr.responseType='arraybuffer' though:
BlobBuilder = window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder;
var xhr = new XMLHttpRequest();
xhr.open('GET', '/path/to/image.png', true);
xhr.responseType = 'arraybuffer';
xhr.onload = function(e) {
if (this.status == 200) {
var bb = new BlobBuilder();
bb.append(this.response); // Note: not xhr.responseText
var blob = bb.getBlob('image/png');
...
}
};
xhr.send();
I think Stoive is spot on, but I want to point out that instead of BlobBuilder there is now a Blob constructor available that will do the trick:
var b = new Blob([byteArray.buffer], {'type': 'application/type'});
I think this is more in keeping with current standards. Thanks much Stoive, very helpful.
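Applied to the snippet above that builds byteArray, that would mean replacing the two BlobBuilder lines with something roughly like this (untested sketch, reusing the byteArray, contentType, and fileWriter variables from that snippet):
var blob = new Blob([byteArray.buffer], { type: contentType.split(";")[0] });
fileWriter.write(blob);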
By the way, XHR2 offers a better way to implement my task:
var xhr = new XMLHttpRequest();
xhr.open('GET', 'http://www.google.ru/images/nav_logo72.png', true);
xhr.responseType = 'blob';
xhr.onreadystatechange = function() {
if (xhr.readyState == 4 && xhr.status == 200) {
// xhr.responseBlob is needed blob data
}
}
xhr.send();
The only disappointment is that this is still a bug in Chrome: http://code.google.com/p/chromium/issues/detail?id=52486
XMLHttpRequest cannot load http://www.google.ru/images/nav_logo72.png. Origin file:// is not allowed by Access-Control-Allow-Origin.
