Multi-file upload as chunks - JavaScript

I'm setting up multi-file upload on the client side. These are the steps I need to handle:
The user uploads multiple files at once, e.g. 5 files at a time.
File sizes vary and can be large, e.g. File1.size = 800 MB.
Each large file is sliced into chunks, e.g. chunk_size = 50 MB.
Each chunk is sent to the backend, which responds for every chunk.
I'm able to do this for a single file upload, i.e. slicing one large file into chunks, but I can't get it to work when multiple files are uploaded at once.
Below is the code I tried:
var that = this;
var files = that.files || [];
var url = that.config.url;
var fileKeyUpload = that.config.fileKeyUpload;

for (var i = 0; i < files.length; i++) { // multiple files loop
    var start = 0;                       // chunk start as 0
    var bytes_per_chunk = 52428800;      // 50 MB per chunk
    var blob = files[i];
    var size = blob.size;
    var end = bytes_per_chunk;

    var getStatus = function() {
        var nextChunk = start + bytes_per_chunk;
        var percentage = nextChunk > size ? 100 : Math.round((start / size) * 100); // calculating percentage
        uploadFile(blob.slice(start, end), files[i], start, percentage).then(function() {
            if (start < size && bytes_per_chunk < size) {
                start = end;
                end = start + bytes_per_chunk;
                start < size ? getStatus() : null;
            }
        });
    };

    return getStatus();
}
function uploadFile(blob, file, offset, completedPercentile) {
    return new Promise(function(resolve) {
        var xhr = new XMLHttpRequest();
        xhr.open("POST", url);
        xhr.setRequestHeader("Accept", "*/*");
        xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest");
        xhr.onreadystatechange = function() {
            if (xhr.readyState === 4 && xhr.status === 200) {
                if (that.config.hasOwnProperty("onFileUploadComplete")) {
                    that.config.onFileUploadComplete(xhr, true, completedPercentile, file);
                    resolve();
                }
            }
        };
        xhr.send(blob);
    });
}
This is what I tried; it works for a single file. Any help on how to achieve multiple-file upload while slicing large files into chunks would be appreciated.
I'm unable to post a working demo because a local API is used.
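For reference, here is a minimal sketch of one way to do it: process the files sequentially and keep the chunk offsets block-scoped per file, so each file gets its own state instead of the shared var start/end closures above. It assumes the backend accepts a raw chunk body POSTed to url, as in the question; the onFileUploadComplete callback and the rest of the config are left out.

// Minimal sketch (not a drop-in replacement): upload each file sequentially,
// slicing it into 50 MB chunks. Assumes the backend accepts a raw chunk body
// POSTed to `url`, as in the question; adapt headers/metadata as needed.
async function uploadAll(files, url) {
    const BYTES_PER_CHUNK = 50 * 1024 * 1024; // 50 MB

    for (const file of files) {               // one file at a time
        for (let start = 0; start < file.size; start += BYTES_PER_CHUNK) {
            const end = Math.min(start + BYTES_PER_CHUNK, file.size);
            const percentage = Math.round((end / file.size) * 100);

            const response = await fetch(url, {
                method: 'POST',
                headers: { 'X-Requested-With': 'XMLHttpRequest' },
                body: file.slice(start, end)
            });
            if (!response.ok) throw new Error('Chunk upload failed for ' + file.name);

            console.log(file.name + ': ' + percentage + '% uploaded');
        }
    }
}

// usage: uploadAll(Array.from(that.files || []), that.config.url);

Because start and end live inside the loop body, there is no state shared between files, and the outer loop is never cut short by an early return after the first file.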

Related

SHA256 hashing of large files in Angular 6 using FileReader issue

I have a problem with SHA256 hashing: if the file size is more than 250 MB, the browser hangs and crashes.
Below is the hashing code; please help.
let fileReader = new FileReader();
fileReader.readAsArrayBuffer(fileToSend);
fileReader.onload = (e) => {
    const hash = CrypTo.SHA256(this.arrayBufferToWordArray(fileReader.result)).toString();
    this.hashCode = hash;
    this.fileHistory.MediaHash = hash;
    this.fileHistory.FileName = fileToSend.name;
    // Insert into file history
    this.fileHistoryService.postFiles(this.fileHistory).subscribe(
        data => {
            this.hashCode = data["MediaHash"];
            this.alertService.success('HASHFILE.FileUploadSuccessMessage', true);
            this.hideGenerateHashCodeButton = true;
        },
        error => {
            this.alertService.error('COMMONERRORMESSAGE.SomethingWentWrongErrorMessage');
        });
}

arrayBufferToWordArray(fileResult) {
    var i8a = new Uint8Array(fileResult);
    var byteArray = [];
    for (var i = 0; i < i8a.length; i += 4) {
        byteArray.push(i8a[i] << 24 | i8a[i + 1] << 16 | i8a[i + 2] << 8 | i8a[i + 3]);
    }
    return CrypTo.lib.WordArray.create(byteArray, i8a.length);
}
The code below is what I tested with all the big files, and it fixed my issue.
var hashdata = CrypTo.algo.SHA256.create();
var file = <FiletoHash>;
if (file) {
    var reader = new FileReader();
    var size = file.size;
    var chunk_size = Math.pow(2, 22); // 4 MB chunks
    var chunks = [];
    var offset = 0;
    var bytes = 0;
    reader.onloadend = (e) => {
        if (reader.readyState == FileReader.DONE) {
            // update the hash with every chunk that is read
            hashdata.update(this.arrayBufferToWordArray(reader.result));
            let chunk: any = reader.result;
            bytes += chunk.length;
            chunks.push(chunk);
            if (offset < size) {
                offset += chunk_size;
                var blob = file.slice(offset, offset + chunk_size);
                reader.readAsArrayBuffer(blob);
            } else {
                // finally generate the hash; use this value as the result
                var hash = hashdata.finalize().toString();
            }
        }
    };
    // read the first chunk
    var blob = file.slice(offset, offset + chunk_size);
    reader.readAsArrayBuffer(blob);
}

arrayBufferToWordArray(fileResult) {
    var i8a = new Uint8Array(fileResult);
    return CrypTo.lib.WordArray.create(i8a, i8a.length);
}
You should definitely use streams, or something like them, to avoid loading the whole file into memory.
Specifically with CryptoJS, it's possible to perform progressive hashing:
var sha256 = CryptoJS.algo.SHA256.create();
sha256.update("Message Part 1");
sha256.update("Message Part 2");
sha256.update("Message Part 3");

var hash = sha256.finalize();
So use FileReader to read parts of the file, and every time you read a part, update the sha256 hash until there is nothing more to read.
See:
filereader api on big files
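A minimal sketch of that approach, assuming the crypto-js build in use lets WordArray.create accept a typed array (as in the fix above); the chunk size and function name are illustrative:

// Minimal sketch: hash a large File in 4 MB chunks with progressive SHA-256.
// Assumes CryptoJS is available and accepts typed arrays in WordArray.create.
function hashFileSha256(file) {
    const CHUNK_SIZE = 4 * 1024 * 1024;
    const sha256 = CryptoJS.algo.SHA256.create();

    // promisified FileReader for one chunk
    const readChunk = (blob) => new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = () => resolve(reader.result);
        reader.onerror = reject;
        reader.readAsArrayBuffer(blob);
    });

    const run = async () => {
        for (let offset = 0; offset < file.size; offset += CHUNK_SIZE) {
            const buffer = await readChunk(file.slice(offset, offset + CHUNK_SIZE));
            sha256.update(CryptoJS.lib.WordArray.create(new Uint8Array(buffer)));
        }
        return sha256.finalize().toString();
    };

    return run(); // resolves with the hex hash
}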

"_formdataPolyfill2.default is not a constructor" is thrown when I try to use the formdata-polyfill npm package

I am using web workers to upload a file, and I am sending the file as a FormData object. Since web workers don't have access to the DOM, I used formdata-polyfill in place of the default FormData, but it throws this error and I don't know how to use the polyfill properly.
Here is my code:
// trying to send the formdata-polyfill object to the worker
require('formdata-polyfill');
let data = new FormData();
data.append('server-method', 'upload');
data.append('file', event.target.files[0]);
// let data = new FormData(event.target.files[0]);
if (this.state.headerActiveTabUid === '1')
    this.props.dispatch(handleFileUpload({ upload: 'assets', data }));
// worker.js
var file = [], p = true, url, token;

function upload(blobOrFile) {
    var xhr = new XMLHttpRequest();
    xhr.open('POST', url, true); // add url to upload
    xhr.setRequestHeader('Authorization', token);
    xhr.onload = function(e) {
    };
    xhr.send(blobOrFile);
}

function process() {
    for (var j = 0; j < file.length; j++) {
        var blob = file[j];
        const BYTES_PER_CHUNK = 1024 * 1024; // 1 MB chunk sizes
        const SIZE = blob.size;
        var start = 0;
        var end = BYTES_PER_CHUNK;
        while (start < SIZE) {
            if ('mozSlice' in blob) {
                var chunk = blob.mozSlice(start, end);
            } else {
                var chunk = blob.slice(start, end);
            }
            upload(chunk);
            start = end;
            end = start + BYTES_PER_CHUNK;
        }
        p = (j === file.length - 1);
        self.postMessage(blob.name + " Uploaded Succesfully");
    }
}

self.addEventListener('message', function(e) {
    url = e.data.url;
    token = e.data.id;
    file.push(e.data.files);
    if (p) {
        process();
    }
});
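There is no accepted answer here, but one hedged alternative sketch: modern browsers expose FormData inside dedicated workers, so it may be possible to skip the polyfill entirely by posting the File to the worker and building the FormData there. The uploadUrl and authToken names below are illustrative, not from the original code.

// Hypothetical sketch (not the polyfill fix itself): build FormData inside the worker.

// main thread - inside the <input> change handler from the question
const worker = new Worker('worker.js');
worker.postMessage({ url: uploadUrl, id: authToken, files: event.target.files[0] });

// worker.js
self.addEventListener('message', function(e) {
    const data = new FormData();             // available in worker scope in current browsers
    data.append('server-method', 'upload');
    data.append('file', e.data.files);

    const xhr = new XMLHttpRequest();
    xhr.open('POST', e.data.url, true);
    xhr.setRequestHeader('Authorization', e.data.id);
    xhr.onload = function() {
        self.postMessage('Uploaded successfully');
    };
    xhr.send(data);
});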

JavaScript FileReader onload (get file from server)

What I want is to read a file from the Windows file system, or from a server, so I can display its contents on the website. We are not allowed to use a database or PHP, only JavaScript.
What I currently have is below, and it works if I get the file from an HTML file upload box. The only thing I need is a way to get the file in JavaScript without selecting it manually, so it loads on page load.
The rest of the code works if I provide the file manually; I only need to get a file and assign it to var file = ;
var file = // How do I get the file from the Windows system? (or from a server is also a possibility)
var reader = new FileReader();
reader.onload = function(progressEvent) {
    // Entire file
    console.log(this.result);

    // By lines
    var lines = this.result.split('\n');
    for (var line = 0; line < lines.length; line++) {
        console.log(lines[line]);
    }
};
reader.readAsText(file);
I got it to work:
var allText;
var trumpCount = 0;
var hilaryCount = 0;

readTextFile("test.txt"); // synchronous request, so allText is set when it returns

// By lines
var lines = allText.split('\n');
for (var line = 0; line < lines.length; line++) {
    // alert(lines[line]);
    if (lines[line].indexOf("t") !== -1) {
        trumpCount++;
    } else {
        hilaryCount++;
    }
}
alert("Votes for trump: " + trumpCount + " Votes for hilary: " + hilaryCount + " Total votes: " + (trumpCount + hilaryCount));

function readTextFile(file) {
    var rawFile = new XMLHttpRequest();
    rawFile.open("GET", file, false); // false = synchronous
    rawFile.onreadystatechange = function() {
        if (rawFile.readyState === 4) {
            if (rawFile.status === 200 || rawFile.status == 0) {
                allText = rawFile.responseText;
                // alert(allText);
            }
        }
    };
    rawFile.send(null);
}
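For completeness, a minimal sketch that avoids FileReader entirely and loads the file on page load, assuming test.txt is served over HTTP(S) from the same origin as the page (fetch won't read from the local Windows file system directly):

// Minimal sketch: fetch the file when the page loads and count the lines.
// Assumes test.txt sits next to the page on the same server (no PHP/database needed).
window.addEventListener('load', async () => {
    const response = await fetch('test.txt');
    if (!response.ok) throw new Error('Could not load test.txt');
    const text = await response.text();

    let trumpCount = 0;
    let hilaryCount = 0;
    for (const line of text.split('\n')) {
        if (line.indexOf('t') !== -1) trumpCount++;
        else hilaryCount++;
    }
    alert('Votes for trump: ' + trumpCount +
          ' Votes for hilary: ' + hilaryCount +
          ' Total votes: ' + (trumpCount + hilaryCount));
});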

How to log contents of HTML5 drag and drop file that is 60MB+ without hanging for minutes?

I have a file that I want to drop on a page and read the contents of. It's a CSV with 9 columns. My drop handler outputs the file contents like this:
function drop(ev) {
    ev.preventDefault();
    var data = ev.dataTransfer.files[0];
    var fileReader = new FileReader();
    fileReader.onload = function(e) {
        console.log(fileReader.result);
    };
    fileReader.onerror = function(e) {
        throw 'Error reading CSV file';
    };
    // Start reading file
    fileReader.readAsText(data);
    return false;
}
When I drag and drop a simple file that is a couple of kilobytes or 1 MB, I can see the contents of the file in the output. However, with a large CSV file it takes many minutes before anything shows up. Is there a way to stream it so the page doesn't look like it's hanging?
With Screw-FileReader you can get a ReadableStream and do it in a streaming fashion:
'use strict'

var blob = new Blob(['111,222,333\naaa,bbb,ccc']) // simulate a file
var stream = blob.stream()
var reader = stream.getReader()
var headerString = ''

var forEachLine = function(row) {
    var colums = row.split(',')
    // append to DOM
    console.log(colums)
}

var pump = function() {
    return reader.read().then(function(result) {
        var value = result.value
        var done = result.done
        if (done) {
            // Do the last line
            headerString && forEachLine(headerString)
            return
        }
        for (var i = 0; i < value.length; i++) {
            // Get the character for the current iteration
            var char = String.fromCharCode(value[i])
            // Check if the char is a new line
            if (char.match(/[^\r\n]+/g) !== null) {
                // Not a new line, so append it to
                // our header string and keep processing
                headerString += char
            } else {
                // We found a new line character
                forEachLine(headerString)
                headerString = ''
            }
        }
        return pump()
    })
}

pump().then(function() {
    console.log('done reading the csv')
})
<script src="https://cdn.rawgit.com/jimmywarting/Screw-FileReader/master/index.js"></script>
If you prefer using the old FileReader without dependencies, pipes, and transforms:
'use strict'

var blob = new Blob(['111,222,333\naaa,bbb,ccc']) // simulate a file
var fr = new FileReader()
var headerString = ''
var position = 0

var forEachLine = function forEachLine(row) {
    var colums = row.split(',')
    // append to DOM
    console.log(colums)
}

var pump = function pump() {
    return new Promise(function(resolve) {
        var chunk = blob.slice(position, position + 524288)
        position += 524288
        fr.onload = function() {
            var value = fr.result
            var done = position >= blob.size
            for (var i = 0; i < value.length; i++) {
                var char = value[i]
                // Check if the char is a new line
                if (char.match(/[^\r\n]+/g) !== null) {
                    // Not a new line, so append it to
                    // our header string and keep processing
                    headerString += char
                } else {
                    // We found a new line character
                    forEachLine(headerString)
                    headerString = ''
                }
            }
            if (done) {
                // Send the last line
                forEachLine(headerString)
                return resolve() // done
            }
            return resolve(pump())
        }
        // Read the next chunk
        fr.readAsText(chunk)
    })
}

pump().then(function() {
    console.log('done reading the csv')
})
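As a side note, current browsers ship Blob.prototype.stream() natively, so the same streaming approach works without the Screw-FileReader dependency; a minimal sketch using TextDecoderStream (assumes a recent browser that supports both):

// Minimal sketch using native Blob.stream() and TextDecoderStream (no polyfill needed).
async function forEachCsvLine(blob, forEachLine) {
    const reader = blob.stream().pipeThrough(new TextDecoderStream()).getReader();
    let buffer = '';
    while (true) {
        const { value, done } = await reader.read();
        if (done) break;
        buffer += value;
        const lines = buffer.split('\n');
        buffer = lines.pop(); // keep the trailing partial line for the next chunk
        for (const line of lines) forEachLine(line.split(','));
    }
    if (buffer) forEachLine(buffer.split(',')); // flush the last line
}

// usage:
// forEachCsvLine(new Blob(['111,222,333\naaa,bbb,ccc']), cols => console.log(cols));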

Data uploading by AJAX in an iOS WebView

In iOS webview I have implemented file uploading:
function upload(blobOrFile) {
    var xhr = new XMLHttpRequest();
    xhr.open('POST', '/server', true);
    xhr.onload = function(e) { ... };
    xhr.send(blobOrFile);
}

document.querySelector('input[type="file"]').addEventListener('change', function(e) {
    var blob = this.files[0];

    const BYTES_PER_CHUNK = 1024 * 1024; // 1 MB chunk sizes
    const SIZE = blob.size;

    var start = 0;
    var end = BYTES_PER_CHUNK;

    while (start < SIZE) {
        upload(blob.slice(start, end));
        start = end;
        end = start + BYTES_PER_CHUNK;
    }
}, false);
This JavaScript works well in all browsers, but in the iOS WebView it sends an empty POST.
Before sending, I alerted the blob length and it was correct, but on the server side I get a content length of 0. What can cause this problem, and is it possible to fix it from the JS side?
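No confirmed fix is shown here, but one hedged workaround to try, assuming the WebView's XHR is dropping Blob bodies: read each chunk into an ArrayBuffer with FileReader and send that instead.

// Hypothetical workaround sketch: send the chunk as an ArrayBuffer rather than a Blob,
// in case the WebView's XHR drops Blob bodies. Untested against the asker's setup.
function uploadChunk(chunk) {
    var reader = new FileReader();
    reader.onload = function() {
        var xhr = new XMLHttpRequest();
        xhr.open('POST', '/server', true);
        xhr.onload = function(e) { /* handle response */ };
        xhr.send(reader.result); // ArrayBuffer instead of Blob
    };
    reader.readAsArrayBuffer(chunk);
}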
