File upload in AngularJS causes browser out of memory error - javascript

I have AngularJS code that uploads files. At first I thought it worked OK, but when I tried to upload bigger files (e.g. 5 files, 10 MB each) I saw that this code handles memory very poorly.
For example, when I attach 5 files (10 MB each, 50 MB in total), the peak memory demand of the browser process reaches 2 GB!
I've tested it in Firefox, Chrome and IE. Additionally, in Chrome the page crashes once the browser process reaches 3 GB of committed memory.
In Firefox I see an error in the console when trying to upload more than 80 MB at once:
out of memory
I've modified the part where the actual request is made and removed data: JSON.stringify(formattedData), which was the first bottleneck; the stringifying turned out to be redundant there.
But a second bottleneck remains: the function binArrayToJson, which takes the ArrayBuffer and copies its data into a plain byte array.
var binArrayToJson = function (binArray) {
    // copies every byte of the typed array into a plain JS array, element by element
    var str = new Array(binArray.length);
    for (var i = 0, binLength = binArray.length; i < binLength; i++) {
        str[i] = binArray[i];
    }
    return str;
};
var applyAttachments = function (data1) {
    angular.forEach(data1, function (value, key) {
        if (key === "Attachments") {
            for (var i = 0; i < value.length; i++) {
                if (value[i].file) { // already saved items don't contain the 'file' property
                    var reader = new FileReader();
                    reader.onloadend = (function (f) {
                        return function (result) {
                            var arrayBuffer = result.target.result;
                            value[f].fileContent = binArrayToJson(new Uint8Array(arrayBuffer));
                            value[f].isUploaded = true;
                        };
                    })(i);
                    reader.readAsArrayBuffer(value[i].file);
                }
            }
        }
    });
};
var createMVCModel = function (output) {
    var defaultStringValue = "";
    return {
        id: output.id,
        Name: output.name || defaultStringValue,
        Status: output.status || "Draft",
        Date: output.date || null,
        Attachments: output.attachments || []
    };
};
var saveModel = function (data, url) {
    var formattedData = createMVCModel(data);
    var deferred = $q.defer();
    applyAttachments(formattedData);
    var check = function () {
        if (allNewFilesUploaded(formattedData) === true) {
            $http({
                url: url,
                method: "POST",
                data: formattedData,
                headers: { 'Content-Type': undefined }
            })
            .then(function (result) {
                deferred.resolve(result);
            },
            function (result) {
                deferred.reject(result);
            });
        } else {
            setTimeout(check, 1000);
        }
    };
    check();
    return deferred.promise;
};
I omitted the parts where the following requirements are met (a rough sketch of them follows this list):
checking the number of files (the limit is 10)
checking the size of each file (the limit is 10 MB each)
checking the permitted file extensions
sending the rest of the (text) data along with the files to the server (an ASP.NET MVC action)
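For reference, a minimal sketch of those omitted checks might look like this (the limits follow the list above; allowedExtensions is an assumption, the real list isn't shown here):
var allowedExtensions = ["pdf", "docx", "png"]; // assumption: actual extension list not shown
var validateAttachments = function (attachments) {
    if (attachments.length > 10) return false;               // at most 10 files
    return attachments.every(function (item) {
        if (!item.file) return true;                          // already saved items have no 'file'
        var ext = item.file.name.split('.').pop().toLowerCase();
        return item.file.size <= 10 * 1024 * 1024 &&          // 10 MB each
            allowedExtensions.indexOf(ext) !== -1;
    });
};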

Doing Multiple $http.post Requests Directly from a FileList
To avoid memory problems, send the files directly:
$scope.upload = function (url, fileList) {
    var config = {
        headers: { 'Content-Type': undefined },
        transformResponse: angular.identity
    };
    // FileList has no .map(), so iterate it with Array.prototype.map.call
    var promises = Array.prototype.map.call(fileList, function (file, index) {
        config.params = { 'n': index, 'name': file.name };
        return $http.post(url, file, config);
    });
    return $q.all(promises);
};
When sending a POST with a File object, it is important to set 'Content-Type': undefined. The XHR send method will then detect the File object and automatically set the content type.
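For completeness, a hypothetical way to wire this up from a controller (the URL is a placeholder, and the input element is looked up manually because ng-model does not bind to file inputs):
var input = document.querySelector("input[type='file']");
input.addEventListener("change", function () {
    $scope.upload("/api/upload", input.files).then(function (responses) {
        console.log(responses.length + " files uploaded");
    });
});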

Related

Why uploading Word OfficeJS PDF document using the API saves an empty PDF document

I have used the JS code below, provided by Microsoft, to save a document as PDF:
Office.context.document.getFileAsync(Office.FileType.Pdf,
    function (result) {
        if (result.status == "succeeded") {
            var myFile = result.value;
            var sliceCount = myFile.sliceCount;
            var slicesReceived = 0, gotAllSlices = true, docdataSlices = [];
            console.log("File size:" + myFile.size + " #Slices: " + sliceCount);
            // Now, you can call getSliceAsync to download the files,
            // as described in the previous code segment (compressed format).
            // Get the file slices.
            getSliceAsync(myFile, 0, sliceCount, gotAllSlices, docdataSlices, slicesReceived);
            myFile.closeAsync();
        } else {
            console.log("Error:", result.error.message);
        }
    }
);
function getSliceAsync(file, nextSlice, sliceCount, gotAllSlices, docdataSlices, slicesReceived) {
    file.getSliceAsync(nextSlice, function (sliceResult) {
        if (sliceResult.status == "succeeded") {
            if (!gotAllSlices) { // Failed to get all slices, no need to continue.
                return;
            }
            // Got one slice, store it in a temporary array.
            // (Or you can do something else, such as
            // send it to a third-party server.)
            docdataSlices[sliceResult.value.index] = sliceResult.value.data;
            if (++slicesReceived == sliceCount) {
                // All slices have been received.
                file.closeAsync();
                onGotAllSlices(docdataSlices);
            } else {
                getSliceAsync(file, ++nextSlice, sliceCount, gotAllSlices, docdataSlices, slicesReceived);
            }
        } else {
            gotAllSlices = false;
            file.closeAsync();
            console.log("getSliceAsync Error:", sliceResult.error.message);
        }
    });
}
function onGotAllSlices(docdataSlices) {
    var docdata = [];
    for (var i = 0; i < docdataSlices.length; i++) {
        docdata = docdata.concat(docdataSlices[i]);
    }
    var fileContent = new String();
    for (var j = 0; j < docdata.length; j++) {
        fileContent += String.fromCharCode(docdata[j]);
    }
    console.log('Final PDF content is received and stored in fileContent.');
    send_file_content(fileContent);
}
function send_file_content(word_doc) {
    var formData = new FormData();
    var blob = new Blob([word_doc], { type: "application/pdf" });
    formData.append("file", blob);
    $.ajax({
        type: 'POST',
        url: 'My-upload-URL',
        data: formData,
        processData: false,
        contentType: false
    }).done(function (data) {
        console.log('* Word Document successfully uploaded: ', data.filepath);
    });
}
I'm pretty sure that the server side is OK, as I have uploaded zillions of PDF documents and it works as expected, but when I upload a Word document as PDF via the above JS code I get a blank page on the server side. If the Word document contains 3 pages, then I get 3 blank pages in the server-side PDF.
The Microsoft documentation builds the file content with String.fromCharCode, which ruins the binary data and produces a blank PDF document.
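The corruption is easy to demonstrate: when a string built with String.fromCharCode is handed to the Blob constructor it is re-encoded as UTF-8, so every byte above 0x7F becomes two bytes, while a typed array is copied into the Blob unchanged. A quick sketch:
// String path: the 0xFF byte is re-encoded as the two-byte UTF-8 sequence for U+00FF
console.log(new Blob([String.fromCharCode(0xff)]).size); // 2
// Typed-array path: the byte goes into the Blob as-is
console.log(new Blob([new Uint8Array([0xff])]).size);    // 1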
Instead of building that string, I used a Uint8Array over the byte array directly:
var blob = new Blob([new Uint8Array(myFinalByteArray)], { type: 'application/pdf' });
And then uploaded the Blob to the remote server using FormData. The problem went away with this approach.
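A rough sketch of how myFinalByteArray can be assembled from the slices collected in getSliceAsync (assuming docdataSlices holds the per-slice byte arrays):
var totalLength = docdataSlices.reduce(function (sum, slice) {
    return sum + slice.length;
}, 0);
var myFinalByteArray = new Uint8Array(totalLength);
var offset = 0;
docdataSlices.forEach(function (slice) {
    myFinalByteArray.set(slice, offset); // set() accepts plain arrays as well as typed arrays
    offset += slice.length;
});
var blob = new Blob([myFinalByteArray], { type: 'application/pdf' });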

Chrome memory issue - File API + AngularJS

I have a web app that needs to upload large files to Azure BLOB storage. My solution uses the HTML5 File API to slice the file into chunks which are then put as blob blocks; the IDs of the blocks are stored in an array and the blocks are then committed as a blob.
The solution works fine in IE. On 64-bit Chrome I have successfully uploaded 4 GB files but see very heavy memory usage (2 GB+). On 32-bit Chrome the specific Chrome process will get to around 500-550 MB and then crash.
I can't see any obvious memory leaks or things I can change to help garbage collection. I store the block IDs in an array so obviously there will be some memory creep, but this shouldn't be massive. It's almost as if the File API is holding the whole file it slices in memory.
It's written as an Angular service called from a controller; I think just the service code is pertinent:
(function () {
    'use strict';

    angular
        .module('app.core')
        .factory('blobUploadService', ['$http', 'stringUtilities', blobUploadService]);

    function blobUploadService($http, stringUtilities) {
        var defaultBlockSize = 1024 * 1024; // Default to 1024KB
        var stopWatch = {};
        var state = {};

        var initializeState = function (config) {
            var blockSize = defaultBlockSize;
            if (config.blockSize) blockSize = config.blockSize;
            var maxBlockSize = blockSize;
            var numberOfBlocks = 1;
            var file = config.file;
            var fileSize = file.size;
            if (fileSize < blockSize) {
                maxBlockSize = fileSize;
            }
            if (fileSize % maxBlockSize === 0) {
                numberOfBlocks = fileSize / maxBlockSize;
            } else {
                numberOfBlocks = parseInt(fileSize / maxBlockSize, 10) + 1;
            }
            return {
                maxBlockSize: maxBlockSize,
                numberOfBlocks: numberOfBlocks,
                totalBytesRemaining: fileSize,
                currentFilePointer: 0,
                blockIds: new Array(),
                blockIdPrefix: 'block-',
                bytesUploaded: 0,
                submitUri: null,
                file: file,
                baseUrl: config.baseUrl,
                sasToken: config.sasToken,
                fileUrl: config.baseUrl + config.sasToken,
                progress: config.progress,
                complete: config.complete,
                error: config.error,
                cancelled: false
            };
        };

        /* config: {
             baseUrl: // baseUrl for blob file uri (i.e. http://<accountName>.blob.core.windows.net/<container>/<blobname>),
             sasToken: // Shared access signature querystring key/value prefixed with ?,
             file: // File object using the HTML5 File API,
             progress: // progress callback function,
             complete: // complete callback function,
             error: // error callback function,
             blockSize: // Use this to override the defaultBlockSize
           } */
        var upload = function (config) {
            state = initializeState(config);

            var reader = new FileReader();
            reader.onloadend = function (evt) {
                if (evt.target.readyState === FileReader.DONE && !state.cancelled) { // DONE === 2
                    var uri = state.fileUrl + '&comp=block&blockid=' + state.blockIds[state.blockIds.length - 1];
                    var requestData = new Uint8Array(evt.target.result);

                    $http.put(uri, requestData, {
                        headers: {
                            'x-ms-blob-type': 'BlockBlob',
                            'Content-Type': state.file.type
                        },
                        transformRequest: []
                    })
                    .success(function (data, status, headers, config) {
                        state.bytesUploaded += requestData.length;
                        var percentComplete = ((parseFloat(state.bytesUploaded) / parseFloat(state.file.size)) * 100).toFixed(2);
                        if (state.progress) state.progress(percentComplete, data, status, headers, config);
                        uploadFileInBlocks(reader, state);
                    })
                    .error(function (data, status, headers, config) {
                        if (state.error) state.error(data, status, headers, config);
                    });
                }
            };

            uploadFileInBlocks(reader, state);

            return {
                cancel: function () {
                    state.cancelled = true;
                }
            };
        };

        function cancel() {
            stopWatch = {};
            state.cancelled = true;
            return true;
        }

        function startStopWatch(handle) {
            if (stopWatch[handle] === undefined) {
                stopWatch[handle] = {};
                stopWatch[handle].start = Date.now();
            }
        }

        function stopStopWatch(handle) {
            stopWatch[handle].stop = Date.now();
            var duration = stopWatch[handle].stop - stopWatch[handle].start;
            delete stopWatch[handle];
            return duration;
        }

        var commitBlockList = function (state) {
            var uri = state.fileUrl + '&comp=blocklist';
            var requestBody = '<?xml version="1.0" encoding="utf-8"?><BlockList>';
            for (var i = 0; i < state.blockIds.length; i++) {
                requestBody += '<Latest>' + state.blockIds[i] + '</Latest>';
            }
            requestBody += '</BlockList>';

            $http.put(uri, requestBody, {
                headers: {
                    'x-ms-blob-content-type': state.file.type
                }
            })
            .success(function (data, status, headers, config) {
                if (state.complete) state.complete(data, status, headers, config);
            })
            .error(function (data, status, headers, config) {
                if (state.error) state.error(data, status, headers, config);
                // called asynchronously if an error occurs
                // or server returns response with an error status.
            });
        };

        var uploadFileInBlocks = function (reader, state) {
            if (!state.cancelled) {
                if (state.totalBytesRemaining > 0) {
                    var fileContent = state.file.slice(state.currentFilePointer,
                        state.currentFilePointer + state.maxBlockSize);
                    var blockId = state.blockIdPrefix + stringUtilities.pad(state.blockIds.length, 6);
                    state.blockIds.push(btoa(blockId));
                    reader.readAsArrayBuffer(fileContent);
                    state.currentFilePointer += state.maxBlockSize;
                    state.totalBytesRemaining -= state.maxBlockSize;
                    if (state.totalBytesRemaining < state.maxBlockSize) {
                        state.maxBlockSize = state.totalBytesRemaining;
                    }
                } else {
                    commitBlockList(state);
                }
            }
        };

        return {
            upload: upload,
            cancel: cancel,
            startStopWatch: startStopWatch,
            stopStopWatch: stopStopWatch
        };
    }
})();
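For context, the service is invoked from a controller roughly like this, following the config block documented above (the URL, SAS token and fileInput element are placeholders):
blobUploadService.upload({
    baseUrl: 'https://myaccount.blob.core.windows.net/mycontainer/myblob',
    sasToken: '?sv=...&sig=...', // shared access signature query string
    file: fileInput.files[0],
    blockSize: 1024 * 1024,
    progress: function (percentComplete) { console.log(percentComplete + '%'); },
    complete: function () { console.log('upload complete'); },
    error: function (data, status) { console.error('upload failed', status); }
});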
Are there any ways I can adjust the scope of objects to help Chrome's GC? I have seen other people mention similar issues, but understood that Chromium had resolved some of them.
I should say my solution is heavily based on Gaurav Mantri's blog post here:
http://gauravmantri.com/2013/02/16/uploading-large-files-in-windows-azure-blob-storage-using-shared-access-signature-html-and-javascript/#comment-47480
I can't see any obvious memory leaks or things I can change to help garbage collection. I store the block IDs in an array so obviously there will be some memory creep, but this shouldn't be massive. It's almost as if the File API is holding the whole file it slices in memory.
You are correct. The new Blobs created by .slice() are being held in memory.
The solution is to call Blob.prototype.close() on the Blob reference when processing of the Blob or File object is complete.
Note also that the JavaScript in the question creates a new FileReader instance every time the upload function is called.
4.3.1. The slice method
The slice() method returns a new Blob object with bytes ranging from the optional start parameter up to but not including the optional end parameter, and with a type attribute that is the value of the optional contentType parameter.
Blob instances exist for the life of the document, though a Blob should be garbage collected once it is removed from the Blob URL Store.
9.6. Lifetime of Blob URLs
Note: User agents are free to garbage collect resources removed from the Blob URL Store.
Each Blob must have an internal snapshot state, which must be initially set to the state of the underlying storage, if any such underlying storage exists, and must be preserved through StructuredClone. Further normative definition of snapshot state can be found for Files.
4.3.2. The close method
The close() method is said to close a Blob, and must act as follows:
1. If the readability state of the context object is CLOSED, terminate this algorithm.
2. Otherwise, set the readability state of the context object to CLOSED.
3. If the context object has an entry in the Blob URL Store, remove the entry that corresponds to the context object.
If the Blob object has been passed to URL.createObjectURL(), call URL.revokeObjectURL() with the object URL, then call .close() on the Blob or File object.
The revokeObjectURL(url) static method
Revokes the Blob URL provided in the string url by removing the corresponding entry from the Blob URL Store. This method must act as follows:
1. If the url refers to a Blob that has a readability state of CLOSED, OR if the value provided for the url argument is not a Blob URL, OR if the value provided for the url argument does not have an entry in the Blob URL Store, this method call does nothing. User agents may display a message on the error console.
2. Otherwise, user agents must remove the entry from the Blob URL Store for url.
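Putting those two quoted pieces together, the cleanup sequence looks roughly like this (note that Blob.prototype.close() comes from the File API draft quoted above and is not implemented by current browsers, hence the guard):
var url = URL.createObjectURL(blob);
// ... use the Blob URL ...
URL.revokeObjectURL(url);
if (typeof blob.close === "function") {
    blob.close(); // removes the Blob's entry and releases the underlying storage
}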
You can view the result of these calls by opening
chrome://blob-internals
and reviewing the details before and after the calls that create and close the Blob.
For example, from
xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
Refcount: 1
Content Type: text/plain
Type: data
Length: 3
to
xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
Refcount: 1
Content Type: text/plain
following the call to .close(). Similarly, a Blob URL entry such as
blob:http://example.com/c2823f75-de26-46f9-a4e5-95f57b8230bd
Uuid: 29e430a6-f093-40c2-bc70-2b6838a713bc
is removed from the list once the corresponding URL has been revoked.
An alternative approach could be to send the file as an ArrayBuffer, or as chunks of ArrayBuffers, and re-assemble the file at the server.
Or you can call the FileReader constructor, FileReader.prototype.readAsArrayBuffer() and the load event of FileReader once each.
At the load event of FileReader, pass the ArrayBuffer to a Uint8Array, then use a ReadableStream with TypedArray.prototype.subarray(), .getReader() and .read() to pull N-byte chunks of the ArrayBuffer out of the Uint8Array as TypedArrays. When chunks totalling the .byteLength of the ArrayBuffer have been processed, pass the array of Uint8Arrays to the Blob constructor to recombine the file parts into a single file in the browser; then send the Blob to the server.
<!DOCTYPE html>
<html>
<head>
</head>
<body>
    <input id="file" type="file">
    <br>
    <progress value="0"></progress>
    <br>
    <output for="file"><img alt="preview"></output>
    <script type="text/javascript">
        const [input, output, img, progress, fr, handleError, CHUNK] = [
            document.querySelector("input[type='file']"),
            document.querySelector("output[for='file']"),
            document.querySelector("output img"),
            document.querySelector("progress"),
            new FileReader,
            (err) => console.log(err),
            1024 * 1024
        ];

        progress.addEventListener("progress", e => {
            progress.value = e.detail.value;
            e.detail.promise();
        });

        let [chunks, NEXT, CURR, url, blob] = [Array(), 0, 0];

        input.onchange = () => {
            NEXT = CURR = progress.value = progress.max = chunks.length = 0;
            if (url) {
                URL.revokeObjectURL(url);
                if (blob.hasOwnProperty("close")) {
                    blob.close();
                }
            }
            if (input.files.length) {
                console.log(input.files[0]);
                progress.max = input.files[0].size;
                progress.step = progress.max / CHUNK;
                fr.readAsArrayBuffer(input.files[0]);
            }
        }

        fr.onload = () => {
            const VIEW = new Uint8Array(fr.result);
            const LEN = VIEW.byteLength;
            const {type, name: filename} = input.files[0];
            const stream = new ReadableStream({
                pull(controller) {
                    if (NEXT < LEN) {
                        controller.enqueue(VIEW.subarray(NEXT, !NEXT ? CHUNK : CHUNK + NEXT));
                        NEXT += CHUNK;
                    } else {
                        controller.close();
                    }
                },
                cancel(reason) {
                    console.log(reason);
                    throw new Error(reason);
                }
            });
            const [reader, processData] = [
                stream.getReader(),
                ({value, done}) => {
                    if (done) {
                        return reader.closed.then(() => chunks);
                    }
                    chunks.push(value);
                    return new Promise(resolve => {
                        progress.dispatchEvent(
                            new CustomEvent("progress", {
                                detail: {
                                    value: CURR += value.byteLength,
                                    promise: resolve
                                }
                            })
                        );
                    })
                    .then(() => reader.read().then(data => processData(data)))
                    .catch(e => reader.cancel(e))
                }
            ];
            reader.read()
                .then(data => processData(data))
                .then(data => {
                    blob = new Blob(data, {type});
                    console.log("complete", data, blob);
                    if (/image/.test(type)) {
                        url = URL.createObjectURL(blob);
                        img.onload = () => {
                            img.title = filename;
                            input.value = "";
                        }
                        img.src = url;
                    } else {
                        input.value = "";
                    }
                })
                .catch(e => handleError(e))
        }
    </script>
</body>
</html>
plnkr http://plnkr.co/edit/AEZ7iQce4QaJOKut71jk?p=preview
You can also utilize fetch():
fetch(new Request("/path/to/server/", {method:"PUT", body:blob}))
To transmit body for a request request, run these steps:
1. Let body be request's body.
2. If body is null, then queue a fetch task on request to process request end-of-body for request and abort these steps.
3. Let read be the result of reading a chunk from body's stream.
When read is fulfilled with an object whose done property is false and whose value property is a Uint8Array object, run these substeps: let bytes be the byte sequence represented by the Uint8Array object; transmit bytes; increase body's transmitted bytes by bytes's length; and run the above step again.
When read is fulfilled with an object whose done property is true, queue a fetch task on request to process request end-of-body for request.
When read is fulfilled with a value that matches with neither of the above patterns, or read is rejected, terminate the ongoing fetch with reason fatal.
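A slightly fuller sketch of that call with basic error handling (the URL is a placeholder):
fetch("/path/to/server/", { method: "PUT", body: blob })
    .then(function (response) {
        if (!response.ok) {
            throw new Error("Upload failed with status " + response.status);
        }
        return response.text();
    })
    .then(function (text) {
        console.log("upload complete", text);
    })
    .catch(function (err) {
        console.error(err);
    });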
See also
Progress indicators for fetch?
Fetch with ReadableStream

Converting byte array output into Blob corrupts file

I am using the Office Javascript API to write an Add-in for Word using Angular.
I want to retrieve the Word document through the API, then convert it to a file and upload it via POST to a server.
The code I am using is nearly identical to the documentation code that Microsoft provides for this use case: https://dev.office.com/reference/add-ins/shared/document.getfileasync#example---get-a-document-in-office-open-xml-compressed-format
The server endpoint requires uploads to be POSTed through a multipart form, so I create a FormData object on which I append the file (a blob) as well as some metadata, when creating the $http call.
The file is being transmitted to the server, but when I open it, it has become corrupted and it can no longer be opened by Word.
According to the documentation, the Office.context.document.getFileAsync function returns a byte array. However, the resulting fileContent variable is a string. When I console.log this string it seems to be compressed data, like it should be.
My guess is I need to do some preprocessing before turning the string into a Blob. But which preprocessing? Base64 encoding through atob doesn't seem to be doing anything.
let sendFile = (fileContent) => {
    let blob = new Blob([fileContent], { type: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' }),
        fd = new FormData();
    blob.lastModifiedDate = new Date();
    fd.append('file', blob, 'uploaded_file_test403.docx');
    fd.append('case_id', caseIdReducer.data());
    $http.post('/file/create', fd, {
        transformRequest: angular.identity,
        headers: { 'Content-Type': undefined }
    })
    .success(() => {
        console.log('upload succeeded');
    })
    .error(() => {
        console.log('upload failed');
    });
};
function onGotAllSlices(docdataSlices) {
    let docdata = [];
    for (let i = 0; i < docdataSlices.length; i++) {
        docdata = docdata.concat(docdataSlices[i]);
    }
    let fileContent = new String();
    for (let j = 0; j < docdata.length; j++) {
        fileContent += String.fromCharCode(docdata[j]);
    }
    // Now all the file content is stored in the 'fileContent' variable;
    // you can do something with it, such as print, fax...
    sendFile(fileContent);
}
function getSliceAsync(file, nextSlice, sliceCount, gotAllSlices, docdataSlices, slicesReceived) {
    file.getSliceAsync(nextSlice, (sliceResult) => {
        if (sliceResult.status === 'succeeded') {
            if (!gotAllSlices) { // Failed to get all slices, no need to continue.
                return;
            }
            // Got one slice, store it in a temporary array.
            // (Or you can do something else, such as
            // send it to a third-party server.)
            docdataSlices[sliceResult.value.index] = sliceResult.value.data;
            if (++slicesReceived === sliceCount) {
                // All slices have been received.
                file.closeAsync();
                onGotAllSlices(docdataSlices);
            } else {
                getSliceAsync(file, ++nextSlice, sliceCount, gotAllSlices, docdataSlices, slicesReceived);
            }
        } else {
            gotAllSlices = false;
            file.closeAsync();
            console.log(`getSliceAsync Error: ${sliceResult.error.message}`);
        }
    });
}
// User clicks a button to start retrieving the document from Word and uploading it to the server
ctrl.handleClick = () => {
    Office.context.document.getFileAsync(Office.FileType.Compressed, { sliceSize: 65536 /* 64 KB */ },
        (result) => {
            if (result.status === 'succeeded') {
                // If the getFileAsync call succeeded, then
                // result.value will return a valid File Object.
                let myFile = result.value,
                    sliceCount = myFile.sliceCount,
                    slicesReceived = 0, gotAllSlices = true, docdataSlices = [];
                // Get the file slices.
                getSliceAsync(myFile, 0, sliceCount, gotAllSlices, docdataSlices, slicesReceived);
            } else {
                console.log(`Error: ${result.error.message}`);
            }
        }
    );
};
I ended up doing this with the fileContent string:
let bytes = new Uint8Array(fileContent.length);
for (let i = 0; i < bytes.length; i++) {
    bytes[i] = fileContent.charCodeAt(i);
}
I then proceed to build the Blob with these bytes:
let blob = new Blob([bytes], { type: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' });
If I then send this via a POST request, the file isn't mangled and can be opened correctly by Word.
I still get the feeling this can be achieved with less hassle and fewer steps. If anyone has a better solution, I'd be very interested to hear it.
Thanks for your answer; Uint8Array was the solution. Just a little improvement, to avoid creating the string:
let bytes = new Uint8Array(docdata.length);
for (let i = 0; i < docdata.length; i++) {
    bytes[i] = docdata[i];
}
Pff! What is wrong with getting an instance of File and not using the FileReader API? C'mon Microsoft!
You should take the byte array and pass it straight into the Blob constructor; turning a binary blob into a string in JavaScript is a bad idea that can lead to "out of range" errors or incorrect encoding.
Just do something along these lines:
var byteArray = new Uint8Array(3)
byteArray[0] = 97
byteArray[1] = 98
byteArray[2] = 99
new Blob([byteArray])
If the chunk is an instance of a typed array or an instance of Blob/File, you can just do:
blob = new Blob([blob, chunk])
And please... don't base64 encode it (~3x larger + slower)

Can I send three requests per time to my server using JavaScript?

The problem:
I'm uploading images via JavaScript to an ASP.NET server page. To report upload progress for each image, I designed it so that each image has its own independent request. But I'm afraid this may overload the server, especially if the number of requests is big.
The idea:
Rather than sending all requests at once, I want to send them in batches of three.
After those three are sent, all other requests have to wait until those three have finished.
The Question:
How can I make the other requests wait until the previous ones finish?
The code:
for (var i = 0; i < inputFiles.files.length; i++) {
    (function (i) {
        var request = new XMLHttpRequest();
        request.open('POST', 'Ajax.ashx');
        request.setRequestHeader('Cache-Control', 'no-cache');
        var data = new FormData();
        data.append('file[]', inputFiles.files[i]);
        request.upload.addEventListener('progress', function (event) {
            if (event.lengthComputable) {
                var percent = parseFloat(event.loaded) / parseFloat(event.total),
                    progressWidth = percent * parseFloat(progressBarWidth);
                progressBar.children().css('width', progressWidth + 'px');
            } else {}
        });
        request.upload.addEventListener('load', function (event) {});
        request.upload.addEventListener('error', function (event) {});
        request.addEventListener('readystatechange', function (event) {
            if (this.readyState == 4) {
                if (this.status == 200) {
                    var code = eval(this.response);
                } else {}
            }
        });
        request.send(data);
    })(i);
}
Because you've added the jQuery tag, you could:
create n $.ajax() requests
wait with the help of $.when() until all of them resolved
and then send the next n requests
Something like this:
function uploadImages(images) {
    var requests = [];

    // upload 3 images (or fewer, if there are fewer images in the array)
    for (var i = 0, len = Math.min(3, images.length); i < len; i++) {
        var image = images.pop(),
            formData = new FormData(),
            request;

        formData.append('file[]', image);

        request = $.ajax({
            url: "Ajax.ashx",
            type: "POST",
            data: formData,
            processData: false,
            contentType: false, // let XMLHttpRequest set the multipart boundary
            beforeSend: function (xhr) {
                // add progress stuff
                // http://www.dave-bond.com/blog/2010/01/JQuery-ajax-progress-HMTL5/
            },
            success: function (data) { }
        });
        requests.push(request);
    }

    // when all our requests have finished and there are images left, start the next round...
    $.when.apply($, requests).done(function () {
        if (images.length) {
            uploadImages(images);
        }
    });
}
Simplified Example
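To start the batched upload from the file input used in the question, one could call it like this (a sketch; the FileList is copied into a real array because uploadImages() consumes it with .pop()):
uploadImages(Array.prototype.slice.call(inputFiles.files));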
The solution:
I was using jQuery 1.4, so it did not work.
Now I'm working with version 1.11 and everything works well.
$.when.apply() only works with jQuery 1.5 or later.

AJAX request Cross-Origin Request Blocked error

I have two projects; the first one is an ASP.NET web project and the second one is an embedded HTTP server library project.
The embedded HTTP server project is taken from: embedded http server project
I want to save a video file from the user's local machine to the user's shared storage. I'm getting and sending the file from the browser using an AJAX request. The embedded HTTP server is supposed to receive a byte array and save the video on the client's shared storage. I have a problem that I've spent days trying to solve but have not yet found a solution for.
In Chrome it gets stuck on stream.CopyTo(streamReader);.
In Firefox and IE it gives a "Cross-Origin Request Blocked" error, but Firefox saves the file even though it reports the error.
Here is the ajax request code:
$(document).ready(function () {
    function hashFile(file, chunkSize, callback) {
        var size = file.size;
        var offset = 0;
        var chunk = file.slice(offset, offset + chunkSize);
        SendChunk(chunk, 0);

        var hashChunk = function () {
            var reader = new FileReader();
            reader.onload = function (e) {
                offset += chunkSize;
                if (offset < size) {
                    chunk = file.slice(offset, offset + chunkSize);
                    SendChunk(chunk, 0);
                } else if (offset > size) {
                    offset -= chunkSize;
                    var newchunkSize = size - offset;
                    chunk = file.slice(offset, offset + newchunkSize);
                    SendChunk(chunk, 1);
                }
            };
            reader.readAsArrayBuffer(chunk);
        };

        function SendChunk(chunk, end) {
            if (end > 0) {
                var ajaxRequest = $.ajax({
                    type: "POST",
                    url: "http://clientip:8080/savefileend",
                    contentType: false,
                    processData: false,
                    data: chunk
                });
            } else {
                var ajaxRequest = $.ajax({
                    type: "POST",
                    url: "http://clientip:8080/savefile",
                    contentType: false,
                    processData: false,
                    data: chunk
                });
                ajaxRequest.done(function (e) {
                    hashChunk();
                });
                ajaxRequest.error(function (xhr) {
                    console.log(xhr);
                    hashChunk();
                });
            }
        }
    }

    function fileInputHandler(evt) {
        var files = evt.target.files;
        var chunkSize = 10485760; // bytes
        var start = window.performance ? performance.now() : Date.now(); // DEBUG
        var onHashFile = function (digest) {
            var end = window.performance ? performance.now() : Date.now(); // DEBUG
            console.log(this.name, digest, (end - start) + 'ms'); // DEBUG
        };
        for (var i = 0, len = files.length; i < len; i++) {
            hashFile(files[i], chunkSize, onHashFile);
        }
    }

    document.getElementById('file1')
        .addEventListener('change', fileInputHandler, false);
});
and here is the embedded server code to get the request:
var stream = request.GetRequestStream();
using (var streamReader = new MemoryStream())
{
    stream.CopyTo(streamReader);
    videoTemp = streamReader.ToArray();
}
using (var fileStream = new FileStream(path, FileMode.Append))
{
    fileStream.Write(videoTemp, 0, videoTemp.Length);
}
By the way:
For IE: if I enable "Access data sources across domains" in the security settings, then it works without error in IE.
For Chrome: if I start Chrome with the --disable-web-security parameter, it works without error in Chrome. But I need to find a solution in code.
Maybe this problem is on the client-side browser. For example, Google Chrome blocks cross-origin requests by default. To allow them you may use this plugin: https://chrome.google.com/webstore/detail/allow-control-allow-origi/nlfbmbojpeacfghkpbjhddihlkkiljbi
I have found the solution, if anyone needs it:
I used the Nancy.Hosting.Self library from http://nancyfx.org/ for the embedded HTTP server. There I was able to add "Access-Control-Allow-Origin" to response.Headers, so that I could transfer the file without the error.
