HTML Rename download link - javascript

I have an mp3 link like this:
http://example.com/932937293723.mp3
but I want it to be renamed when the user downloads the file, like this:
http://example.com/Artist - Title.mp3
My code:
DOWNLOAD
The mp3 file is stored on a remote server, and I'm not the owner of that server.
The HTML download attribute doesn't seem like a good solution because it's not cross-browser. Is there any cross-browser solution for this? JavaScript maybe :D

If you insist on working from the front end, try the following code. The getBlob method is deprecated, so you will need to update that part. Let me know.
function getBinary(file) {
  var xhr = new XMLHttpRequest();
  xhr.open("GET", file, false);
  xhr.overrideMimeType("text/plain; charset=x-user-defined");
  xhr.send(null);
  return xhr.responseText;
}

function sendBinary(data, url) {
  var xhr = new XMLHttpRequest();
  xhr.open("POST", url, true);
  if (typeof XMLHttpRequest.prototype.sendAsBinary == "function") { // Firefox 3 & 4
    var tmp = '';
    for (var i = 0; i < data.length; i++) tmp += String.fromCharCode(data.charCodeAt(i) & 0xff);
    data = tmp;
  } else { // Chrome 9
    // http://javascript0.org/wiki/Portable_sendAsBinary
    XMLHttpRequest.prototype.sendAsBinary = function (text) {
      var data = new ArrayBuffer(text.length);
      var ui8a = new Uint8Array(data, 0);
      for (var i = 0; i < text.length; i++) ui8a[i] = (text.charCodeAt(i) & 0xff);
      var bb = new BlobBuilder(); // doesn't exist in Firefox 4
      bb.append(data);
      var blob = bb.getBlob();
      this.send(blob);
    };
  }
  xhr.sendAsBinary(data);
}

var data = getBinary("My music.mp3");
sendBinary(data, 'http://www.tonycuffe.com/mp3/tailtoddle_lo.mp3');

In your back-end code, you can fetch the file to your server, store it in a variable, rename it there, set the corresponding headers, and return it. This could happen as an AJAX call initiated by the JavaScript click.
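For example, here is a minimal Node/Express sketch of that idea. The /download route, the hard-coded remote URL, and the audio/mpeg content type are assumptions for illustration only, not part of the original setup:
// Hypothetical Express route that proxies the remote mp3 and renames it on the fly.
const express = require('express');
const http = require('http');

const app = express();

app.get('/download', (req, res) => {
  // assumed remote location; in practice this would come from your own data
  const remoteUrl = 'http://example.com/932937293723.mp3';
  http.get(remoteUrl, (remote) => {
    res.set({
      'Content-Type': 'audio/mpeg',
      // the filename here is what the browser's save dialog will show
      'Content-Disposition': 'attachment; filename="Artist - Title.mp3"'
    });
    remote.pipe(res); // stream the remote response straight through to the client
  });
});

app.listen(3000);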
Post further details about your back end and I can help you more.

You can use something like below (ASP.NET)
In ASPX
Download
In ASP.NET
Response.ContentType = "audio/mpeg3";
Response.AddHeader("content-disposition", "attachment;filename=New_file_name.mp3");
Server.Transfer(decoded_URL_of_MP3_file);
Look here for other MIME types
Update #1 - Using JavaScript alone, you can try something like this, though I haven't tested it in different browsers:
function Download(url, fancyFileName)
{
var file = document.createElement('a');
file.href = url;
file.target = '_blank';
file.download = fancyFileName;
var event = document.createEvent('Event');
event.initEvent('click', true, true);
file.dispatchEvent(event);
window.URL.revokeObjectURL(file.href);
}
Download('http://server.com/file.mp3','Artist_file.mp3');

Related

How to get "file" parameter of http request in javascript?

I'm trying to write a JavaScript script that, based on whether a user logs in properly or not, will redirect them to a separate PHP script. The issue is that I can't figure out how to get the file parameter of the request so that I can see whether the request I'm looking for is there. How do I get the file parameter of a request in JavaScript?
Sorry for the confusion; what I mean by the file attribute is what appears under the "file" section for each request in the following.
example
So if the file tab of the packet shows a certain file, how would I differentiate?
It's not clear what you're asking.
The part "so that I can see if the request I'm looking for is there" tells me you want to debug your website, or at least that's my interpretation.
If you use Chrome or Firefox Developer Edition, you can press F12 (or CTRL + SHIFT + J) to open the developer console.
Switch to the "Network" tab, and you'll see all the XMLHttpRequests.
Click on a specific request, and you'll see its details.
A basic XMLHttpRequest goes like this:
function reqListener () {
console.log(this.responseText);
}
var oReq = new XMLHttpRequest();
oReq.addEventListener("load", reqListener);
oReq.open("GET", "http://www.example.org/example.txt");
oReq.send();
And you get the result of your request in the callback function reqListener.
See also https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/Using_XMLHttpRequest
If you want to get the request handler's URL, that goes like this:
function reqListener (e) {
//console.log(this.responseText);
console.log(e);
console.log(e.currentTarget.responseURL);
}
var oReq = new XMLHttpRequest();
oReq.addEventListener("load", reqListener);
oReq.open("GET", "https://stackoverflow.com/questions/58407228");
oReq.send();
And if you want to get a parameter called "file" inside a URL, it goes like this:
function getUrlVars(urlHref)
{
    var vars = [], hash;
    var hashes = urlHref.slice(urlHref.indexOf('?') + 1).split('&');
    var i;
    for (i = 0; i < hashes.length; i++)
    {
        hash = hashes[i].split('=');
        vars.push(decodeURIComponent(hash[0]));
        vars[decodeURIComponent(hash[0])] = decodeURIComponent(hash[1]);
    } // Next i
    return vars;
} // End Function getUrlVars

var dictParameters = getUrlVars("http://www.example.com/handler?file=bla.bin");
// arrays have no .contains() method, so test for the key with the "in" operator
if ("file" in dictParameters)
{
    console.log(dictParameters["file"]);
}
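For comparison, a short sketch that does the same lookup with the built-in URL API (reusing the sample URL from above):
// Same "file" lookup using URL/URLSearchParams instead of getUrlVars()
var params = new URL("http://www.example.com/handler?file=bla.bin").searchParams;
if (params.has("file")) {
    console.log(params.get("file")); // "bla.bin"
}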
As for XMLHttpRequest, it doesn't have a property called file.
Also, this is 2019: you should be using the Fetch API with async and await, not the XMLHttpRequest API, which doesn't use promises.
Here's a getting started overview.
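As a minimal sketch of that, reusing the example.org URL from the XHR snippet above:
// fetch + async/await equivalent of the basic XMLHttpRequest example
async function reqListenerDemo() {
    const response = await fetch("http://www.example.org/example.txt");
    if (!response.ok) {
        throw new Error("HTTP " + response.status);
    }
    console.log(await response.text()); // the response body
    console.log(response.url);          // the final request URL
}

reqListenerDemo().catch(console.error);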
Edit:
Ah, I see:
If you have a url, such as
var url = "http://www6.scratch99.com/web-development/javascript/test.js?abc=def";
you do
var url = "http://www6.scratch99.com";
var urlParts = url.replace('http://','').replace('https://','').split(/[/?#]/);
var domain = urlParts[0];
to get the domain part. Then you subtract the domain (+protocol), and end it at ? or #:
Full code:
var url = "http://www6.scratch99.com/web-development/javascript/test.js?abc=def";
// var url = "http://www6.scratch99.com";
// var url = "http://www6.scratch99.com?test=123";
var protocol = url.substr(0, url.indexOf(":") + 3)
var urlParts = url.substr(protocol.length).split(/[/?#]/);
var domain = urlParts[0];
var fileParts = url.substr(protocol.length + domain.length);
var file = fileParts.split(/[?#]/)[0];
and if you want the filename only:
var pathParts = file.split('/');
var fileOnly = pathParts[pathParts.length-1];
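The same pieces can also be pulled out with the built-in URL API; a quick sketch using the sample URL above:
var u = new URL("http://www6.scratch99.com/web-development/javascript/test.js?abc=def");
console.log(u.hostname);                  // "www6.scratch99.com"
console.log(u.pathname);                  // "/web-development/javascript/test.js"
console.log(u.pathname.split('/').pop()); // "test.js" (filename only)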

Migrate FileReader ReadAsBinaryString() to ReadAsArrayBuffer() or ReadAsText()

I noticed that the new Mozilla Firefox returns an allocation size overflow (on FileReader.ReadAsBinaryString()) when the file is bigger than 200MB (or thereabouts).
Here's some of my test code for the client web browser:
function upload(fileInputId, fileIndex)
{
  var file = document.getElementById(fileInputId).files[fileIndex];
  var blob;
  var reader = new FileReader();
  reader.readAsBinaryString(file);
  reader.onloadend = function(evt)
  {
    xhr = new XMLHttpRequest();
    xhr.open("POST", "upload.php", true);
    XMLHttpRequest.prototype.mySendAsBinary = function(text){
      var data = new ArrayBuffer(text.length);
      var ui8a = new Uint8Array(data, 0);
      for (var i = 0; i < text.length; i++){
        ui8a[i] = (text.charCodeAt(i) & 0xff);
      }
      if(typeof window.Blob == "function")
      {
        blob = new Blob([data]);
      }else{
        var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
        bb.append(data);
        blob = bb.getBlob();
      }
      this.send(blob);
    }
    var eventSource = xhr.upload || xhr;
    eventSource.addEventListener("progress", function(e) {
      var position = e.position || e.loaded;
      var total = e.totalSize || e.total;
      var percentage = Math.round((position/total)*100);
    });
    xhr.onreadystatechange = function()
    {
      if(xhr.readyState == 4)
      {
        if(xhr.status == 200)
        {
          console.log("Done");
        }else{
          console.log("Fail");
        }
      }
    };
    xhr.mySendAsBinary(evt.target.result);
  };
}
So I tried changing it to FileReader.ReadAsArrayBuffer(); the error no longer shows up, but the data is not the same (since it's not read as a binary string).
Does anyone have a solution to this problem? Is there any way to upload a bigger file from JS to the web server as raw data/string other than the FileReader implementation?
I read in the Mozilla JS documentation:
This feature is non-standard and is not on a standards track. Do not
use it on production sites facing the Web: it will not work for every
user. There may also be large incompatibilities between
implementations and the behavior may change in the future. - Mozilla
If not ReadAsBinaryString, then how do I implement ReadAsArrayBuffer or ReadAsText?
To send Files to a web-server, you simply don't need js. HTML alone is well able to do this with the <form> element.
Now if you want to go through js, e.g. to catch the different ProgressEvents, then you can send your File directly; there is no need to read it whatsoever on your side.
To do this, you've got two (or three) solutions.
If your server is able to handle PUT requests, you can simply xhr.send(file);.
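(A bare-bones sketch of that direct-PUT variant, assuming the same your_server.page placeholder endpoint used in the snippets below:)
// send the File object itself; no FileReader involved
var xhr = new XMLHttpRequest();
xhr.open('PUT', 'your_server.page');
xhr.upload.onprogress = function (evt) {
  console.log(Math.round((evt.loaded / evt.total) * 100) + '%');
};
xhr.send(file); // `file` is the File taken from an <input type="file">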
Otherwise, you'd have to go through a FormData.
// if you really want to go the XHR way
document.forms[0].onsubmit = function handleSubmit(evt) {
  if(!window.FormData) { // old browsers use the <form>
    return;
  }
  // now we handle the submit through js
  evt.preventDefault();
  var fD = new FormData(this);
  var xhr = new XMLHttpRequest();
  xhr.onprogress = function handleProgress(evt){};
  xhr.onload = function handleLoad(evt){};
  xhr.onerror = function handleError(evt){};
  xhr.open(this.method, this.action);
  // xhr.send(fD); // won't work in StackSnippet
  log(fD, this.method, this.action); // so we just log its content
};

function log(formData, method, action) {
  console.log('would have sent');
  for(let [key, val] of formData.entries())
    console.log(key, val);
  console.log('through', method);
  console.log('to', action);
}
<!-- this in itself is enough -->
<form method="POST" action="your_server.page">
  <input type="file" name="file_upload">
  <input type="submit">
</form>
Now, you sent a comment saying that you can't upload Files bigger than 1GB to your server.
This limitation is only due to your server's config, so the best option, if you want to accept such big files, is to configure it correctly.
But if you really want to send your File by chunks, even then don't get off of the Blob interface.
Indeed Blobs have a slice() method, so use it.
document.forms[0].onsubmit = function handleSubmit(evt) {
  evt.preventDefault();
  var file = this.elements[0].files[0];
  var processed = 0;
  if(file) {
    // var MAX_CHUNK_SIZE = Math.min(file.size, server_max_size);
    // for demo we just split in 10 chunks
    var MAX_CHUNK_SIZE = file.size > 10 ? (file.size / 10) | 0 : 1;
    loadChunk(0);
  }
  function loadChunk(start) {
    var fD = new FormData();
    var sliced = file.slice(start, start+MAX_CHUNK_SIZE);
    processed += sliced.size; // only for demo
    fD.append('file_upload', sliced, file.name);
    fD.append('starting_index', start);
    if(start + MAX_CHUNK_SIZE >= file.size) {
      fD.append('last_chunk', true);
    }
    var xhr = new XMLHttpRequest();
    xhr.open('POST', 'your_server.page');
    xhr.onload = function onchunkposted(evt) {
      if(start + MAX_CHUNK_SIZE >= file.size) {
        console.log('All done. Original file size: %s, total of chunks sizes %s', file.size, processed);
        return;
      }
      loadChunk(start + MAX_CHUNK_SIZE);
    };
    // xhr.send(fD);
    log(fD);
    setTimeout(xhr.onload, 200); // fake XHR onload
  }
};

function log(formData, method, action) {
  console.log('would have sent');
  for(let [key, val] of formData.entries())
    console.log(key, val);
}
<form method="POST" action="your_server.page">
  <input type="file" name="file_upload">
  <input type="submit">
</form>
But you absolutely don't need to go through a FileReader for this operation.
Actually the only case where it could make sense to use a FileReader here would be for some Android browsers that don't support passing Blob into a FormData, even though they don't give a single clue about it.
So in that case, you'd have to set up your server to let you know the request was empty, and only then read the File as a dataURI that you would send in place of the original File.
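A rough sketch of that dataURI fallback (the field names and the your_server.page endpoint are just placeholders carried over from the snippets above):
// Fallback: read the File as a data: URI and send that string instead of the Blob
function sendAsDataURI(file) {
  var reader = new FileReader();
  reader.onload = function () {
    var fD = new FormData();
    fD.append('file_upload', reader.result); // "data:<mime>;base64,...."
    fD.append('file_name', file.name);
    var xhr = new XMLHttpRequest();
    xhr.open('POST', 'your_server.page');
    xhr.send(fD);
  };
  reader.readAsDataURL(file);
}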
After a long week of research and sleepless nights: you can't upload binary strings without breaking them, and base64 doesn't work for all files, only images; the journey from the client side to the server breaks the bytes being sent.
Kaiido's statement is correct:
To send Files to a web-server, you simply don't need js
But that doesn't answer my question. A simple XMLHttpRequest() can upload the file and track the progress as well, but still, that's not it. Direct upload, either from the <form> or using XMLHttpRequest(), requires increasing the upload limit in the PHP settings. This method is not convenient for me. What if the client uploads a 4GB file? Then I need to increase the limit to 4GB. Next time, a client uploads a 6GB file, and I have to increase it to 6GB.
Using the slice() method makes sense for bigger files, as we can send them to the server part by part, but I am not using it yet.
Here's some of my test code that worked as I wanted. I hope some expert can correct me if I am wrong.
My Upload.js
function upload(fileInputId, fileIndex)
{
  var file = document.getElementById(fileInputId).files[fileIndex];
  var blob;
  var reader = new FileReader();
  reader.readAsArrayBuffer(file);
  reader.onloadend = function(evt)
  {
    xhr = new XMLHttpRequest();
    xhr.open("POST", "upload.php?name=" + base64_encode(file.name), true);
    XMLHttpRequest.prototype.mySendAsBinary = function(text){
      var ui8a = new Uint8Array(new Int8Array(text));
      if(typeof window.Blob == "function")
      {
        blob = new Blob([ui8a]);
      }else{
        var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
        bb.append(ui8a);
        blob = bb.getBlob();
      }
      this.send(blob);
    }
    var eventSource = xhr.upload || xhr;
    eventSource.addEventListener("progress", function(e) {
      var position = e.position || e.loaded;
      var total = e.totalSize || e.total;
      var percentage = Math.round((position/total)*100);
      console.log(percentage);
    });
    xhr.onreadystatechange = function()
    {
      if(xhr.readyState == 4)
      {
        if(xhr.status == 200)
        {
          console.log("Done");
        }else{
          console.log("Fail");
        }
      }
    };
    xhr.mySendAsBinary(evt.target.result);
  };
}
Below is how the PHP server listens to the ArrayBuffer from JS:
if(isset($_GET["name"])){
  $name = base64_decode($_GET["name"]);
  $loc = $name;
  $inputHandler = fopen('php://input', "r");
  $fileHandler = fopen($loc, "w+");
  while(true) {
    //$buffer = fgets($inputHandler, 1024);
    $buffer = fread($inputHandler, 1000000);
    if (strlen($buffer) == 0) {
      fclose($inputHandler);
      fclose($fileHandler);
      return true;
    }
    //$b = base64_encode($buffer);
    fwrite($fileHandler, $buffer);
  }
}
The above method works well. The FileReader reads the file as an ArrayBuffer, which is then uploaded to the server. For me, migrating from ReadAsBinaryString() to ReadAsArrayBuffer() is important, and ReadAsArrayBuffer() performs better than ReadAsBinaryString().
Here are some reasons why some developers rely on the FileReader API:
Streaming. Using this method, the file is streamed, so we can avoid changing the PHP settings multiple times.
Easy encryption. As the file is sent as an ArrayBuffer, it is easy for the developer to encrypt the file while the upload is in progress.
This method also supports uploading any type of file. I've done some tests and found that the ReadAsArrayBuffer() method is faster than ReadAsBinaryString() and direct form upload. You may try it.
Security Notice
The above code is only test code; to use it in production, you have to consider sending the data in GET or POST over HTTPS.

Client download of a server generated zip file

Before somebody says "duplicate", I just want to make sure folks know that I have already reviewed these questions:
1) Uses angular and php, not sure what is happening here (I don't know PHP): Download zip file and trigger "save file" dialog from angular method
2) Can't get this answer to do anything: how to download a zip file using angular
3) This person can already download, which is past the point I'm trying to figure out:
Download external zip file from angular triggered on a button action
4) No answer for this one:
download .zip file from server in nodejs
5) I don't know what language this even is:
https://stackoverflow.com/questions/35596764/zip-file-download-using-angularjs-directive
Given those questions, if this is still a duplicate, I apologize. Here is, yet, another version of this question.
My Angular 1.5.X client gives me a list of titles, each of which has an associated file. My Node 4.X/Express 4.X server takes that list, gets the file locations, creates a zip file using express-zip from npm, and then streams that file back in the response. I then want my client to initiate the browser's "download a file" option.
Here's my client code (Angular 1.5.X):
function bulkdownload(titles){
titles = titles || [];
if ( titles.length > 0 ) {
$http.get('/query/bulkdownload',{
params:{titles:titles},
responseType:'arraybuffer'
})
.then(successCb,errorCb)
.catch(exceptionCb);
}
function successCb(response){
// This is the part I believe I cannot get to work, my code snippet is below
};
function errorCb(error){
alert('Error: ' + JSON.stringify(error));
};
function exceptionCb(ex){
alert('Exception: ' + JSON.stringify(ex));
};
};
Node (4.X) code with express-zip, https://www.npmjs.com/package/express-zip:
router.get('/bulkdownload',function(req,resp){
var titles = req.query.titles || [];
if ( titles.length > 0 ){
utils.getFileLocations(titles).
then(function(files){
let filename = 'zipfile.zip';
// .zip sets Content-Type and Content-disposition
resp.zip(files,filename,console.log);
},
_errorCb)
}
});
Here's my successCb in my client code (Angular 1.5.X):
function successCb(response){
var URL = $window.URL || $window.webkitURL || $window.mozURL || $window.msURL;
if ( URL ) {
var blob = new Blob([response.data],{type:'application/zip'});
var url = URL.createObjectURL(blob);
$window.open(url);
}
};
The "blob" part seems to work fine. Checking it in IE's debugger, it does look like a file stream of octet information. Now, I believe I need to get that blob into some HTML5 directive to initiate the "Save File As" from the browser. Maybe? Maybe not?
Since 90%+ of our users are on IE11, I test all of my Angular code in PhantomJS (Karma) and IE. When I run the code, I get the old "Access is denied" error in an alert window:
Exception: {"description":"Access is denied...<stack trace>}
Suggestions, clarifications, answers, etc. are welcome!
Use this one:
var url="YOUR ZIP URL HERE";
window.open(url, '_blank');
var zip_file_path = "" //put inside "" your path with file.zip
var zip_file_name = "" //put inside "" file name or something
var a = document.createElement("a");
document.body.appendChild(a);
a.style = "display: none";
a.href = zip_file_path;
a.download = zip_file_name;
a.click();
document.body.removeChild(a);
As indicated in this answer, I have used the below JavaScript function and now I am able to download the byte[] array content successfully.
Function to convert byte array stream (type of string) to blob object:
var b64toBlob = function(b64Data, contentType, sliceSize) {
  contentType = contentType || '';
  sliceSize = sliceSize || 512;
  var byteCharacters = atob(b64Data);
  var byteArrays = [];
  for (var offset = 0; offset < byteCharacters.length; offset += sliceSize) {
    var slice = byteCharacters.slice(offset, offset + sliceSize);
    var byteNumbers = new Array(slice.length);
    for (var i = 0; i < slice.length; i++) {
      byteNumbers[i] = slice.charCodeAt(i);
    }
    var byteArray = new Uint8Array(byteNumbers);
    byteArrays.push(byteArray);
  }
  var blob = new Blob(byteArrays, {type: contentType});
  return blob;
};
And this is how I call this function and save the blob object with FileSaver.js (getting data via Angular.js $http.get):
$http.get("your/api/uri").success(function(data, status, headers, config) {
//Here, data is type of string
var blob = b64toBlob(data, 'application/zip');
var fileName = "download.zip";
saveAs(blob, fileName);
});
Note: I am sending the byte[] array (Java-Server-Side) like this:
byte[] myByteArray = /*generate your zip file and convert into byte array*/ new byte[]();
return new ResponseEntity<byte[]>(myByteArray , headers, HttpStatus.OK);
I updated my bulkdownload method to use $window.open(...) instead of $http.get(...):
function bulkdownload(titles){
  titles = titles || [];
  if ( titles.length > 0 ) {
    var url = '/query/bulkdownload?';
    var len = titles.length;
    for ( var ii = 0; ii < len; ii++ ) {
      url = url + 'titles=' + titles[ii];
      if ( ii < len-1 ) {
        url = url + '&';
      }
    }
    $window.open(url);
  }
};
I have only tested this in IE11.
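If the titles can contain spaces or other reserved characters, a variant that URL-encodes each value may be safer; this is a sketch only, not part of the IE11-tested code above:
function bulkdownload(titles) {
  titles = titles || [];
  if (titles.length > 0) {
    // encodeURIComponent guards against spaces, '&', '=' etc. in the titles
    var query = titles.map(function (t) {
      return 'titles=' + encodeURIComponent(t);
    }).join('&');
    $window.open('/query/bulkdownload?' + query);
  }
}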

Why does AJAX output come with the wrong encoding?

I'm getting a file from a server with AJAX (Angular). The file is a simple XLSX document, sent like this:
ob_start();
$file = \PHPExcel_IOFactory::createWriter($xls, 'Excel2007');
$file->save('php://output');
$response->setContent(ob_get_clean());
$response->headers->replace(array(
'Content-Type' => 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'Content-Disposition' => 'attachment;filename=file.xlsx"'
));
When I make the request from the front end, I use an Accept header too. Then I save the file with angular-file-saver, using FileSaver.js and Blob.js.
But the received file is corrupt and I can't open it in Excel: its size is (for example) 12446 bytes, but Chrome's DevTools Network tab shows the response's Content-Length header as 7141 bytes.
How can I solve this problem?
UPD:
I'm sending a request like this:
$http.get(baseURL + '/' + entity + '/export/?' + condition + sort, {
headers: {'Accept': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet; charset=utf-8'}
});
and downloading file just like this:
var data = new Blob([response.data], {type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=utf-8'});
FileSaver.saveAs(data, 'file.xlsx');
The way I got around the problem was using plain JS AJAX instead of AngularJS. (There might be a problem with how AngularJS and jQuery handle binary responses.)
This should work:
var request = new XMLHttpRequest();
request.open('GET', 'http://yourserver/yourpath', true);
request.responseType = 'blob';
request.onload = function (e) {
  if (this.status === 200) {
    var blob = this.response;
    if (window.navigator.msSaveOrOpenBlob) {
      var fileNamePattern = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/;
      window.navigator.msSaveBlob(blob, fileNamePattern.exec(request.getResponseHeader("content-disposition"))[1]);
    } else {
      var downloadLink = window.document.createElement('a');
      var contentTypeHeader = request.getResponseHeader("Content-Type");
      var b = new Blob([blob], { type: contentTypeHeader });
      downloadLink.href = window.URL.createObjectURL(b);
      var fileNamePattern = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/;
      downloadLink.download = fileNamePattern.exec(request.getResponseHeader("content-disposition"))[1];
      document.body.appendChild(downloadLink);
      downloadLink.click();
      document.body.removeChild(downloadLink);
      window.URL.revokeObjectURL(downloadLink.href); // revoke the object URL string, not the Blob
    }
  }
};
request.send();
Code is based on this and this.
FYI, I found that new Blob([response.data], ...) returns almost double the size of response.data when response.data is not returned as a blob but as text/plain or application/vnd.openxmlformats-officedocument.spreadsheetml.sheet. To get around it, you need to pass it an array of bytes instead:
var i, l, d, array;
d = this.result;
l = d.length;
array = new Uint8Array(l);
for (var i = 0; i < l; i++){
array[i] = d.charCodeAt(i);
}
var b = new Blob([array], {type: 'application/octet-stream'});
window.location.href = URL.createObjectURL(b);
Code is from here.
Anyway, since the AJAX response is not correct when using AngularJS, you won't get a valid xlsx file this way. You need to go with vanilla JS.

How to Asynchronously write an ArrayBuffer directly to file using nsIArrayBufferInputStream in Firefox extension

To make a long story short:
How to Asynchronously write an ArrayBuffer directly to file using nsIArrayBufferInputStream in Firefox extension ?
It seems that MDN does not have any documentation on nsIArrayBufferInputStream.
I know I can use nsIStringInputStream and convert the ArrayBuffer to a String, but this imposes a big performance hit.
Also, converting an ArrayBuffer to a string using this code:
String.fromCharCode.apply(null, new Uint16Array(buf));
does not work if the buffer is 500 KB or bigger, so we must loop over it one char at a time:
var buf16 = new Uint16Array(buf);
for (let i = 0; i < buf16.length; i++){
  s += String.fromCharCode(buf16[i]);
}
Or, I can use nsIBinaryOutputStream.writeByteArray but it cannot be used with NetUtil.asyncCopy (or can it?)
//this works ok, but is synchronous :-(
function writeBinFile(aFile, data){
Components.utils.import("resource://gre/modules/FileUtils.jsm");
let nsFile = Components.Constructor("#mozilla.org/file/local;1", Ci.nsILocalFile, "initWithPath");
if(typeof aFile === 'string') aFile = nsFile(aFile);
var stream = FileUtils.openSafeFileOutputStream(aFile, FileUtils.MODE_WRONLY | FileUtils.MODE_CREATE);
var binaryStream = Cc["#mozilla.org/binaryoutputstream;1"].createInstance(Ci.nsIBinaryOutputStream);
binaryStream.setOutputStream(stream);
binaryStream.writeByteArray(data, data.length);
FileUtils.closeSafeFileOutputStream(stream);
}
And the long story is...
I have been trying to use nsIArrayBufferInputStream
http://dxr.mozilla.org/mozilla-central/source/netwerk/base/public/nsIArrayBufferInputStream.idl
but with no success. The code I tried:
function fileWrite(file, data, callback) {
Cu.import("resource://gre/modules/FileUtils.jsm");
Cu.import("resource://gre/modules/NetUtil.jsm");
let nsFile = Components.Constructor("#mozilla.org/file/local;1", Ci.nsILocalFile, "initWithPath");
if (typeof file == 'string') file = new nsFile(file);
let ostream = FileUtils.openSafeFileOutputStream(file)
let istream = Cc["#mozilla.org/io/arraybuffer-input-stream;1"].createInstance(Ci.nsIArrayBufferInputStream);
istream.setData(data, 0, data.length);
let bstream = Cc["#mozilla.org/binaryinputstream;1"].createInstance(Ci.nsIBinaryInputStream);
bstream.setInputStream(istream);
//NetUtil.asyncCopy(istream, ostream,
NetUtil.asyncCopy(bstream, ostream,
function(status) {
if (callback) callback(Components.isSuccessCode(status));
}
);
}
The ArrayBuffer data param is the response from an XMLHttpRequest:
function getBinFile(url, dir) {
let nsFile = Components.Constructor("#mozilla.org/file/local;1", Ci.nsILocalFile, "initWithPath");
let oReq = new XMLHttpRequest();
oReq.open("GET", url, true);
oReq.responseType = "arraybuffer";
oReq.onload = function(oEvent) {
var arrayBuffer = oReq.response;
if (arrayBuffer) {
//let byteArray = new Uint8Array(arrayBuffer);
let byteArray = arrayBuffer;
dir = /\\$/.test(dir) ? dir: dir + '\\';
let file = nsFile(dir + decodeURIComponent(url.split('/').pop()));
fileWrite(file, byteArray);
}
};
oReq.send(null);
}
calling like this:
getBinFile( 'http://....', 'c:\\demo\\');
A file is created but with no contents!
I'm answering myself in case anyone stumbles upon this question...
With help from Josh Matthews (of Mozilla), I found the answer:
Use byteLength instead of length:
istream.setData(data, 0, data.byteLength);
