Can I send three requests at a time to my server using JavaScript? - javascript

The problem:
I'm uploading images via JavaScript to an ASP.NET server page. To show upload progress for each image, I designed it so that each image has an independent request. But I'm afraid this may overload the server, especially if the number of requests is big.
The idea:
Rather than sending all requests at once, I want to send them in batches of three: after three have been sent, all other requests have to wait until those three have finished.
The Question:
How can I make the other requests wait until the previous ones finish?
The code:
for (var i = 0; i < inputFiles.files.length; i++) {
    (function (i) {
        var request = new XMLHttpRequest();
        request.open('POST', 'Ajax.ashx');
        request.setRequestHeader('Cache-Control', 'no-cache');
        var data = new FormData();
        data.append('file[]', inputFiles.files[i]);
        request.upload.addEventListener('progress', function (event) {
            if (event.lengthComputable) {
                var percent = parseFloat(event.loaded) / parseFloat(event.total),
                    progressWidth = percent * parseFloat(progressBarWidth);
                progressBar.children().css('width', progressWidth + 'px');
            }
        });
        request.upload.addEventListener('load', function (event) {});
        request.upload.addEventListener('error', function (event) {});
        request.addEventListener('readystatechange', function (event) {
            if (this.readyState == 4) {
                if (this.status == 200) {
                    // note: eval-ing a server response is unsafe; JSON.parse is preferable
                    var code = eval(this.response);
                }
            }
        });
        request.send(data);
    })(i);
}

Because you've added the jQuery tag, you could:
create n $.ajax() requests,
wait with the help of $.when() until all of them have resolved,
and then send the next n requests.
Something like this:
function uploadImages(images) {
    var requests = [];
    // upload 3 images (or fewer, if there are fewer images left in the array)
    for (var i = 0, len = Math.min(3, images.length); i < len; i++) {
        var image = images.pop(),
            formData = new FormData(),
            request;
        formData.append('file[]', image);
        request = $.ajax({
            url: "Ajax.ashx",
            type: "POST",
            data: formData,
            processData: false,
            contentType: false, // required when sending FormData
            beforeSend: function (xhr) {
                // add progress stuff
                // http://www.dave-bond.com/blog/2010/01/JQuery-ajax-progress-HMTL5/
            },
            success: function (data) { }
        });
        requests.push(request);
    }
    // when all requests have finished and there are images left, start the next round...
    $.when.apply($, requests).done(function () {
        if (images.length) {
            uploadImages(images);
        }
    });
}
Simplified Example

The solution:
I was using jQuery 1.4, so it did not work.
Now I'm working with version 1.11 and everything works fine.
$.when.apply() only works with jQuery 1.5 or later.
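For anyone stuck below jQuery 1.5, here is a minimal fallback sketch (an assumption, not part of the original answer; buildFormData is a hypothetical helper that wraps one image in a FormData object): count completions through the complete option of $.ajax instead of $.when().
function uploadBatch(images) {
    var batch = Math.min(3, images.length),
        finished = 0;
    for (var i = 0; i < batch; i++) {
        $.ajax({
            url: 'Ajax.ashx',
            type: 'POST',
            data: buildFormData(images.pop()), // hypothetical helper
            processData: false,
            contentType: false,
            complete: function () {
                // 'complete' fires on success and error alike, so the next
                // batch starts even if one upload fails
                if (++finished === batch && images.length) {
                    uploadBatch(images);
                }
            }
        });
    }
}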

Related

How to know if XMLHttpRequest has finished?

I'm uploading multiple files with a form field
<input type="file" accept="image/jpeg, image/png" multiple>
A loop sends all files to an upload function:
// Upload photo function
var upload = function (photo, callback) {
    var formData = new FormData();
    formData.append('photo', photo);
    var request = new XMLHttpRequest();
    request.onreadystatechange = function () {
        if (request.readyState === 4) {
            callback(request.response);
        }
    };
    request.open('POST', 'upload.php');
    request.responseType = 'json';
    request.send(formData);
};
for (var i = 0; i < files.length; i++) {
    upload(resizedFile, function (response) { });
}
Once all files are successfully uploaded, I want to redirect:
//all files finished, go to ...
if (response !== null && typeof response !== 'object') response = JSON.parse(response);
window.location.href = "my-new-url.php?id=" + response.id;
My problem: the redirect happens before all uploads are finished. How can I make the script wait for all XMLHttpRequests to finish?
EDIT
I changed the upload function as follows, to upload with jQuery. The function is called several times from the for loop, so how can I wait until all loops/jQuery calls have finished, and then redirect?
function upload(photo, callback) {
    var fd = new FormData();
    fd.append('photo', photo);
    return $.ajax({
        url: 'upload.php',
        type: 'post',
        data: fd,
        contentType: false,
        processData: false,
        success: function (response) {
        }
    });
}
You can increment a global counter variable in the callback. When it reaches the length of the array, all the requests have completed.
var completed = 0;
for (var i = 0; i < files.length; i++) {
    upload(resizedFile, function (response) {
        if (++completed == files.length) {
            window.location = redirect_url;
        }
    });
}
First of all, I would promisify the upload function. With jQuery it would be much shorter, as $.ajax returns a thenable object, but with old-fashioned vanilla JS it looks like this:
var upload = function (photo) {
    return new Promise(function (resolve, reject) {
        var request = new XMLHttpRequest();
        request.onload = function () {
            if (request.status >= 200 && request.status < 300) {
                resolve(request.response);
            } else {
                reject(request.statusText);
            }
        };
        request.onerror = function () { reject(request.statusText); };
        request.open('POST', 'upload.php');
        request.responseType = 'json';
        var formData = new FormData();
        formData.append('photo', photo);
        request.send(formData);
    });
};
See also: http://ccoenraets.github.io/es6-tutorial-data/promisify/
Then I would collect these promises in an array:
var uploads = Array.prototype.map.call(files, function (file) { return upload(resizeFile(file)); }); // FileList has no .map of its own
(or simply iterate as you did and push them in an array)
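Spelled out, that push variant is a short sketch:
var uploads = [];
for (var i = 0; i < files.length; i++) {
    uploads.push(upload(resizeFile(files[i])));
}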
Then you can use this code to handle the completion of all uploads:
Promise.all(uploads)
    .then(function (results) { /* do the redirect */ })
    .catch(function (error) { console.log(error); });
See also: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
Note: Why is the counting suggested in another answer wrong? Because it does not handle the case where one of the requests fails. You will not be able to detect that case, and the counter will never reach the value the condition waits for. Promises (and async/await) are the right way to handle asynchronous operations like ajax requests.
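To illustrate the point, here is a minimal sketch (an assumption, not part of the original answers) of what the counter would have to look like to survive failures; files and redirect_url are the names used in the counting answer above:
// a counter that also counts failures: the final condition is reached even
// when some uploads fail, and failures become detectable
var settled = 0, failures = 0;
function onSettled(failed) {
    if (failed) failures++;
    if (++settled === files.length) {
        if (failures === 0) {
            window.location.href = redirect_url;
        } else {
            console.log(failures + ' upload(s) failed, not redirecting');
        }
    }
}
// each request must call onSettled(false) on success and onSettled(true) on error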
[Update]
Here is the jQuery approach using the latest JS language features:
var upload = (photo) => {
    photo = resizeFile(photo);
    let formData = new FormData();
    formData.append('photo', photo);
    return $.ajax({
        type: 'POST',
        url: 'upload.php',
        data: formData,
        // FormData must not be serialized or given a default content type
        processData: false,
        contentType: false
    }); // check the other options of the ajax method, you might need them
};
let uploads = [...files].map(upload); // spread first: FileList has no .map
$.when.apply($, uploads)
    .done(_ => { /* do the redirect */ })
    .fail(e => { /* handle error */ });
The latter part can be written with async/await syntax:
async function somefunction() {
    // rest of the code
    try {
        await $.when.apply($, uploads);
        /* do the redirect */
    } catch (ex) {
        /* handle error */
    }
}
Note that $.ajax returns a Deferred, which is a jQuery object compatible with Promise.
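Because a Deferred is thenable, the native Promise.all from the vanilla answer above can consume the jQuery version directly. A sketch, assuming jQuery 3+ (whose Deferreds are Promises/A+ compliant):
// native Promise.all consuming jQuery Deferreds
Promise.all(uploads)
    .then(results => { /* do the redirect */ })
    .catch(err => console.log(err));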

File upload in AngularJS causes browser out of memory error

I have AngularJS code that uploads files. At first I thought it worked OK, but when I tried to upload bigger files (e.g. 5 files, 10 MB each) I saw that this code has very poor performance regarding memory handling.
For example: when I attach 5 files (10 MB each = 50 MB in total), peak browser process memory demand reaches 2 GB!
I've tested it in Firefox, Chrome and IE. Additionally, in Chrome the page crashes once the browser process reaches 3 GB of committed memory.
In Firefox I see an error in the console when trying to upload more than 80 MB at once:
out of memory
I've modified the part where the actual request is made and removed data: JSON.stringify(formattedData), which was the first bottleneck; it turned out that the stringifying was redundant there.
But the second bottleneck still exists: the function binArrayToJson, which takes an ArrayBuffer and copies the data into a plain byte array.
var binArrayToJson = function (binArray) {
    // preallocate the result array (the original '[binArray.length]' created a
    // one-element array, which was almost certainly not the intent)
    var str = new Array(binArray.length);
    for (var i = 0, binLength = binArray.length; i < binLength; i++) {
        str[i] = binArray[i];
    }
    return str;
};
var applyAttachments = function (data1) {
    angular.forEach(data1,
        function (value, key) {
            if (key === "Attachments") {
                for (var i = 0; i < value.length; i++) {
                    if (value[i].file) { // already saved items don't contain a 'file' property
                        var reader = new FileReader();
                        reader.onloadend = (function (f) {
                            return function (result) {
                                var arrayBuffer = result.target.result;
                                value[f].fileContent = binArrayToJson(new Uint8Array(arrayBuffer));
                                value[f].isUploaded = true;
                            };
                        })(i);
                        reader.readAsArrayBuffer(value[i].file);
                    }
                }
            }
        });
};
var createMVCModel = function (output) {
    var defaultStringValue = "";
    return {
        id: output.id,
        Name: output.name || defaultStringValue,
        Status: output.status || "Draft",
        Date: output.date || null,
        Attachments: output.attachments || []
    };
};
var saveModel = function (data, url) {
    var formattedData = createMVCModel(data);
    var deferred = $q.defer();
    applyAttachments(formattedData);
    var check = function () {
        if (allNewFilesUploaded(formattedData) === true) {
            $http({
                url: url,
                method: "POST",
                data: formattedData,
                headers: { 'Content-Type': undefined }
            })
            .then(function (result) {
                deferred.resolve(result);
            },
            function (result) {
                deferred.reject(result);
            });
        } else {
            setTimeout(check, 1000);
        }
    };
    check();
    return deferred.promise;
};
I omitted the parts where the following requirements are met (a sketch of these client-side checks follows the list):
checking number of files (limit is 10)
checking size of each file (limit is 10 MB each)
checking permitted file extensions
sending rest of the (text) data along with files to the server (ASP.NET MVC method)
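A minimal sketch of those omitted checks, assuming the limits listed above (the allowedExtensions list is an assumption, not from the original post):
function validateFiles(files) {
    var allowedExtensions = ['jpg', 'jpeg', 'png']; // assumed list
    if (files.length > 10) {
        return 'Too many files (limit is 10)';
    }
    for (var i = 0; i < files.length; i++) {
        if (files[i].size > 10 * 1024 * 1024) {
            return files[i].name + ' exceeds the 10 MB limit';
        }
        var ext = files[i].name.split('.').pop().toLowerCase();
        if (allowedExtensions.indexOf(ext) === -1) {
            return '.' + ext + ' files are not permitted';
        }
    }
    return null; // all checks passed
}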
Doing Multiple $http.post Requests Directly from a FileList
To avoid memory problems, send the files directly:
$scope.upload = function (url, fileList) {
    // FileList has no .map of its own, hence Array.prototype.map.call
    var promises = Array.prototype.map.call(fileList, function (file, index) {
        // build a fresh config per file so the requests don't share one
        // mutated params object
        var config = {
            headers: { 'Content-Type': undefined },
            transformResponse: angular.identity,
            params: { 'n': index, 'name': file.name }
        };
        return $http.post(url, file, config);
    });
    return $q.all(promises);
};
When sending a POST with a File object, it is important to set 'Content-Type': undefined. The XHR send method will then detect the File object and automatically set the content type.
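A hypothetical usage sketch (the element id, the URL, and the change-handler wiring are assumptions, not from the original answer):
// call $scope.upload from a file input's change handler
document.getElementById('fileInput').addEventListener('change', function (e) {
    $scope.upload('/api/upload', e.target.files).then(function (responses) {
        console.log('All ' + responses.length + ' files uploaded');
    });
});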

AJAX request Cross-Origin Request Blocked error

I have two projects; the first is an ASP.NET web project and the second is an embedded HTTP server library project.
The embedded HTTP server project is taken from: embedded http server project
I want to save a video file from the user's local machine to the user's shared storage. I'm getting the file from the browser and sending it via an ajax request. The embedded HTTP server is supposed to receive a byte array and save the video on the client's shared storage. I have a problem that I've spent days trying to solve but haven't yet found a solution for.
In Chrome it gets stuck on stream.CopyTo(streamReader);.
In Firefox and IE it gives a "Cross-Origin Request Blocked" error, but Firefox saves the file even though it reports the error.
Here is the ajax request code:
$(document).ready(function () {
    function hashFile(file, chunkSize, callback) {
        var size = file.size;
        var offset = 0;
        var chunk = file.slice(offset, offset + chunkSize);
        SendChunk(chunk, 0);
        var hashChunk = function () {
            var reader = new FileReader();
            reader.onload = function (e) {
                offset += chunkSize;
                if (offset < size) {
                    chunk = file.slice(offset, offset + chunkSize);
                    SendChunk(chunk, 0);
                }
                else if (offset > size) {
                    offset -= chunkSize;
                    var newchunkSize = size - offset;
                    chunk = file.slice(offset, offset + newchunkSize);
                    SendChunk(chunk, 1);
                }
            };
            reader.readAsArrayBuffer(chunk);
        };
        function SendChunk(chunk, end) {
            if (end > 0) {
                var ajaxRequest = $.ajax({
                    type: "POST",
                    url: "http://clientip:8080/savefileend",
                    contentType: false,
                    processData: false,
                    data: chunk
                });
            }
            else {
                var ajaxRequest = $.ajax({
                    type: "POST",
                    url: "http://clientip:8080/savefile",
                    contentType: false,
                    processData: false,
                    data: chunk
                });
                ajaxRequest.done(function (e) {
                    hashChunk();
                });
                ajaxRequest.error(function (xhr) {
                    console.log(xhr); // the original logged an undefined 'e' here
                    hashChunk();
                });
            }
        }
    }
    function fileInputHandler(evt) {
        var files = evt.target.files;
        var chunkSize = 10485760; // bytes
        var start = window.performance ? performance.now() : Date.now(); // DEBUG
        var onHashFile = function (digest) {
            var end = window.performance ? performance.now() : Date.now(); // DEBUG
            console.log(this.name, digest, (end - start) + 'ms'); // DEBUG
        };
        for (var i = 0, len = files.length; i < len; i++) {
            hashFile(files[i], chunkSize, onHashFile);
        }
    }
    document.getElementById('file1')
        .addEventListener('change', fileInputHandler, false);
});
and here is the embedded server code to get the request:
var stream = request.GetRequestStream();
using (var streamReader = new MemoryStream())
{
    stream.CopyTo(streamReader);
    videoTemp = streamReader.ToArray();
}
using (var fileStream = new FileStream(path, FileMode.Append))
{
    fileStream.Write(videoTemp, 0, videoTemp.Length);
}
By the way:
For IE: if I enable "Access data sources across domains" in the security settings, it works without error in IE.
For Chrome: if I start Chrome with the --disable-web-security parameter, it works without error in Chrome. But I have to find a solution in code.
Maybe this problem is on the client-side browser. For example, Google Chrome blocks cross-origin requests by default. To allow them you may use this plugin: https://chrome.google.com/webstore/detail/allow-control-allow-origi/nlfbmbojpeacfghkpbjhddihlkkiljbi
I have found the solution, in case anyone needs it:
I used the Nancy.Hosting.Self library from http://nancyfx.org/ for the embedded HTTP server. There I was able to add "Access-Control-Allow-Origin" to response.Headers, so I could transfer the file without the error.

jQuery not working when sending sync XMLHttpRequest

I have this code which sends a string in chunks.
Everything is OK except that while the script is posting to the server, #status doesn't update.
The request is sync, not async, because I want to post one chunk at a time rather than everything at once.
for (var packet in packets) {
    var calculated = ((packet / (packets_length - 1)) * 100);
    $('#status').text(calculated);
    var formData = new FormData();
    formData.append("packet", packets[packet]);
    formData.append("packet_num", packet);
    formData.append("name", 'name');
    var request = new XMLHttpRequest();
    request.open("POST", "index.php", false);
    request.send(formData);
}
Can someone tell me what I am doing wrong?
Thanks.
That is because the browser doesn't refresh the view while it is busy executing code (JS executes on a single thread, so with sync requests this is normal).
You should make the iteration over the array asynchronous, like this: http://jsfiddle.net/7TzF8/
I made minor changes just to show the idea; you can refactor your code according to the main idea.
var packets = str2chunk(JSON.stringify(frames_array).split('data:image/jpeg;base64,').join(''), 10);
var packets_length = packets.length;

function processPacket(packet) {
    if (packet >= packets_length) {
        return;
    }
    $.ajax({
        type: "POST",
        url: "http://fiddle.jshell.net/echo/jsonp/",
        data: {
            packet: packets[packet],
            packet_num: packet,
            name: 'name'
        },
        success: function (answer) {
            console.log(packet);
            var calculated = ((packet / (packets_length - 1)) * 100);
            $('#status').text(calculated + '%');
            processPacket(packet + 1);
        },
        error: function () {
        }
    });
}
processPacket(0);

XMLHttpRequest that is being Aborted

I'm looking over a bit of code that deals with XHR. It looks like the first XHR's .send() succeeds, and then the subsequent one is aborted before it gets to its .send().
Quick and dirty:
url = "http://192.168.1.1/cgi-bin/test.cgi";
data = "1235,123,21,1232,12321,432";
myXHR = new Array();
for (var i = 0; i < 2; i++) {
    myXHR[i] = new XMLHttpRequest();
    myXHR[i].open("POST", url, true);
    myXHR[i].onerror = function () {
        alert("Error occurred");
    };
    myXHR[i].onload = function () {
        if (myXHR[i].status == 200) {
            alert("Yay I worked");
            var data = myXHR[i].responseText;
        }
    };
    // do some setting up of XHR headers
    myXHR[i].send(data);
    myXHR[i] = null;
}
What could be happening that would cause Firebug to show Abort before the second .send() is done?
Try this:
url = "http://192.168.1.1/cgi-bin/test.cgi";
data = "1235,123,21,1232,12321,432";
var myXHR = [];
for (var i = 0; i < 2; i++) {
    myXHR[i] = new XMLHttpRequest();
    myXHR[i].open("POST", url, true);
    myXHR[i].onerror = function () {
        alert("Error occurred");
    };
    myXHR[i].onload = function () {
        if (myXHR[i].status == 200) {
            alert("Yay I worked");
            var data = myXHR[i].responseText;
        }
    };
    // do some setting up of XHR headers
    myXHR[i].send(data);
    myXHR[i] = null;
}
When I run this code I get TypeError: myXHR[i] is undefined (on a stock Firefox 20 install on my Mac... what version are you on?).
At any rate, I can see one issue with this (i.e. myXHR[i] will be undefined) that might also apply to you, in particular with:
myXHR[i].onload = function () {
    if (myXHR[i].status == 200) {
        alert("Yay I worked");
        var data = myXHR[i].responseText;
    }
};
Because this is triggered asynchronously, i will have been incremented to 2, which is of course outside the bounds of the two-element myXHR array. Have you tried closing over the value of i, like so:
myXHR[i].onload = (function (i) {
    return function () {
        if (myXHR[i].status == 200) {
            alert("Yay I worked");
            var data = myXHR[i].responseText;
        }
    };
})(i);
Because once I correctly save that i value in the function body, this code succeeds for both calls.
I know this isn't the exact issue you're having, but I think it will be an issue regardless, so you may as well give it a go, right? It's not as though there have been a huge number of other answers, unfortunately.
Hope this helps.
Found out what was happening.
The XHR was being aborted because there was no return value from the web server the request was being sent to. The web server is a custom one; someone had changed its code so that it no longer sent 200 Success OK when the incoming data produced no data in response.
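A defensive sketch for this failure mode (an assumption, not something from the original post): giving the XHR a timeout makes a server that never answers surface as an ontimeout event instead of a mysterious abort. url and data are the variables from the snippet above.
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.timeout = 10000; // 10 s budget; the value is an assumption
xhr.ontimeout = function () {
    alert('Server did not respond in time');
};
xhr.onerror = function () {
    alert('Network error or request aborted');
};
xhr.onload = function () {
    if (xhr.status == 200) {
        alert('Yay I worked');
    }
};
xhr.send(data);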
All good now. Thanks for the help.
