I am using an HTML file input to attach multiple files.
I am using the code below to loop through all the files and save them to the database, but it saves only the last attached file, multiple times.
If I attach file 1 and file 2, file 2 is saved to the database 2 times.
If I attach file 1, file 2, and file 3, file 3 is saved to the database 3 times.
for (let i = 0; i < FileList.length; i++) {
  var artworkFileName = FileList[i].name;
  var artworkMimeType = FileList[i].type;
  var file = FileList[i];
  if (file) {
    // Read the file
    var reader = new FileReader();
    reader.readAsDataURL(file);
    reader.onload = function (e) {
      var artworkDocumentbodyContents = e.target.result;
      artworkDocumentbodyContents = artworkDocumentbodyContents.substring(artworkDocumentbodyContents.indexOf(',') + 1);
      UploadFileAPI(artworkDocumentbodyContents, artworkFileName, artworkMimeType, varQues, IsAllSubmit, varSaveButton); // To save the files to the database (Dataverse table)
    };
  }
}
Any suggestions?
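A likely cause is that the per-file variables are declared with var, which is function-scoped, so by the time the asynchronous onload handlers run they all read the values from the last loop iteration. Below is a minimal sketch of one fix, using const so each iteration keeps its own copies; FileList, UploadFileAPI and the extra arguments are assumed to be the same objects as in the question.

for (let i = 0; i < FileList.length; i++) {
  const file = FileList[i];
  if (!file) continue;

  // const is block-scoped, so every iteration gets its own copy of
  // these values for the onload handler below.
  const artworkFileName = file.name;
  const artworkMimeType = file.type;

  const reader = new FileReader();
  reader.onload = function (e) {
    // Strip the "data:<mime>;base64," prefix from the data URL.
    const contents = e.target.result.substring(e.target.result.indexOf(',') + 1);
    UploadFileAPI(contents, artworkFileName, artworkMimeType, varQues, IsAllSubmit, varSaveButton);
  };
  reader.readAsDataURL(file);
}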
TinyMCE is not allowing multiple file uploads in a post. You can select a file and it will be inserted into the TinyMCE editor; however, once you submit, only the last inserted image is uploaded.
Below is the code I am working with (the [0] is removed in the second attempt, where I was thinking TinyMCE would handle the files array):
file_picker_callback: function(callback, value, meta) {
  if (meta.filetype == 'image') {
    $('#upload').trigger('click')
    $('#upload').on('change', function() {
      var file = this.files[0]
      var reader = new FileReader()
      var name = file.name.split('.')[0]
      var blobCache = tinymce.activeEditor.editorUpload.blobCache
      var blobInfo = blobCache.create(name, file, reader.result)
      blobCache.add(blobInfo);
      reader.onload = function(e) {
        callback(blobInfo.blobUri(), {
          alt: file.name,
          title: name
        })
      }
      reader.readAsDataURL(file)
    })
  }
}
I have tried appending additional files with a for loop, removing the [0] from files and from the split, without success.
file_picker_callback: function(callback, value, meta) {
  if (meta.filetype == 'image') {
    $('#upload').trigger('click')
    $('#upload').on('change', function() {
      var file = this.files//[0]
      var reader = new FileReader()
      var name = []
      for(var x = 0; x < file.length; x++) {
        name.push(file[x].name.split('.'))
      }
      var blobCache = tinymce.activeEditor.editorUpload.blobCache
      var blobInfo = blobCache.create(name, file, reader.result)
      blobCache.add(blobInfo);
      reader.onload = function(e) {
        callback(blobInfo.blobUri(), {alt: file.name, title: name})
      }
      reader.readAsDataURL(file)
    })
  }
}
I have also tried allowing automatic uploads, which only works for the first image; the rest fall back to base64 in the database. Lastly, I tried to combine all the files before uploading, but I'm not seeing different file names in console.log. For instance, I attach one file and see one file in console.log; I attach another and see two responses in console.log, but both show the most recently attached file, and only the last attached image is uploaded on submit. It seems that TinyMCE overwrites the file with each image attachment.
Is there a different approach to this so that I can add images to a post with TinyMCE and, upon submit, all of them are uploaded instead of only the last attached image?
I have now changed to the following for a working solution. Using the name as the first argument when calling blobCache.create was the cause of the issue; a unique blob id is required instead.
file_picker_callback: function(callback, value, meta) {
  if (meta.filetype == 'image') {
    $('#upload').on('change', function() {
      var file = this.files[0]
      var reader = new FileReader()
      reader.onload = function(e) {
        // var name = file.name.split('.')[0] // replaced with id below
        // var base64 = reader.result.split(',')[1]; // for base64
        var id = 'blobid' + (new Date()).getTime();
        var blobCache = tinymce.activeEditor.editorUpload.blobCache
        var blobInfo = blobCache.create(id, file, reader.result)
        blobCache.add(blobInfo);
        callback(blobInfo.blobUri(), {alt: file.name, title: file.name}) // name var was removed above, so use file.name
      }
      reader.readAsDataURL(file)
    })
    $('#upload').trigger('click')
  }
}
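As a follow-up on the images that still fall back to base64: one option, sketched here under the assumption that an images_upload_url or images_upload_handler is configured in tinymce.init, is to explicitly flush the blob cache on submit before reading the editor content.

// Sketch of a submit handler: upload every image still held in the blob
// cache, then read the editor content once the uploads have finished.
// Assumes images_upload_url or images_upload_handler is configured.
tinymce.activeEditor.uploadImages(function () {
  var html = tinymce.activeEditor.getContent();
  // send `html` to the server here
});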
I need your help with the following problem:
I have an HTML input which supports multiple files;
I upload, let's say, 5 files;
Each file is processed: it is read with readAsDataURL by a FileReader and the file's data is saved to an object (other params will be saved there too, which is why it's an object), which is pushed to an array.
After I run the flow I described, the length of the final array is NOT changed.
I believe the problem is the async behaviour, but I cannot figure out how I should change the code to make it work, which is why I'm asking for help. Please find the code below:
var controls = document.getElementById('controls');

function processUploadedFilesData(files) {
  if (!files[0]) {
    return;
  }
  var uploads = [];
  for (var i = 0, length = files.length; i < length; i++) {
    (function(file) {
      var reader = new FileReader();
      //I need object, as other params will be saved too in future;
      var newFile = {};
      reader.readAsDataURL(file);
      reader.onloadend = function(e) {
        newFile.data = e.target.result;
        uploads.push(newFile);
      }
    })(files[i]);
  }
  return uploads;
}

controls.addEventListener('change', function(e) {
  var uploadedFilesOfUser = processUploadedFilesData(e.target.files);
  alert(uploadedFilesOfUser.length);
});
Codepen example - https://codepen.io/yodeco/pen/xWevRy
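One common way around the async behaviour, sketched below with a hypothetical readUploadedFilesData function standing in for processUploadedFilesData, is to wrap each FileReader in a Promise and wait for all of them with Promise.all; the array is only used once every file has finished reading.

var controls = document.getElementById('controls');

function readUploadedFilesData(files) {
  // Wrap each FileReader in a Promise that resolves once its file is read.
  var readers = Array.prototype.map.call(files, function (file) {
    return new Promise(function (resolve, reject) {
      var reader = new FileReader();
      reader.onload = function (e) {
        // Other params can be added to this object later.
        resolve({ data: e.target.result });
      };
      reader.onerror = reject;
      reader.readAsDataURL(file);
    });
  });
  // Resolves with an array of the objects above, in the original order.
  return Promise.all(readers);
}

controls.addEventListener('change', function (e) {
  readUploadedFilesData(e.target.files).then(function (uploads) {
    alert(uploads.length);
  });
});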
I am creating a drag-and-drop file upload zone. When I upload multiple files at a time it works, but I need it to support uploading in multiple stages. I know that the issue below is that I am reassigning dFiles each time, but I can't figure out the right way to get more files added to dFiles each time the handler is called.
var dFiles;
//var dFiles = []
var holder = document.getElementById('holder');
holder.ondrop = function (e) {
  e.preventDefault();
  dFiles = (e.dataTransfer.files);
  //dFiles.push(e.dataTransfer.files);
}
I tried initializing dFiles as an empty array and pushing the files into it (commented out above). Later this created a data type mismatch error when I was reading the file data:
for (var i = 0; i < dFiles.length; i++) {
  reader = new FileReader();
  reader.readAsDataURL(dFiles[i]); // error: dFiles[i] is a whole FileList here, not a File
}
e.dataTransfer.files is a list of files.
You will have to add each file separately to dFiles:
var files = e.dataTransfer.files;
for (var i = 0, l = files.length; i < l; i++) {
  dFiles.push(files[i]);
}
Or the ES6 way:
dFiles.push(...e.dataTransfer.files);
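Putting it together, here is a minimal sketch of a drop handler that accumulates files across several drops into a plain array and reads them later; the holder element and the dFiles name are taken from the question, and readAll is just an illustrative helper.

var dFiles = []; // plain array, so it survives multiple drops
var holder = document.getElementById('holder');

holder.ondragover = function (e) { e.preventDefault(); };
holder.ondrop = function (e) {
  e.preventDefault();
  // Append the dropped File objects one by one instead of replacing
  // dFiles with the new FileList.
  dFiles.push(...e.dataTransfer.files);
};

// Later, each entry is a real File, so FileReader accepts it:
function readAll() {
  for (var i = 0; i < dFiles.length; i++) {
    var reader = new FileReader();
    reader.readAsDataURL(dFiles[i]);
  }
}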
I have a page where the user can select a folder to upload files. Before sending the files, I need to read them and check the data. My code is organized as follows:
$( '#folder-select' ).on('change', getValidFileList);

var fileList = [];

var getValidFileList = function(event) {
  //Get the selected files
  files = $( this ).get(0).files;
  for(var i=0; i<files.length; i++) {
    checkFile(files[i]);
  }
  //Do something with the final fileList
  console.log(fileList);
};

var checkFile = function(file) {
  var reader = new FileReader();
  reader.onload = function (event) {
    //Here I parse and check the data and if valid append it to fileList
  };
  reader.readAsArrayBuffer(file);
};
I would like to take the resulting fileList array to keep processing/displaying the uploaded files. I found that reader.onload() is called asynchronously, so the result of the console.log(fileList) after the for loop is an empty array (it is executed before the reader.onload() is fired). Is there any way to wait until all files are read and appended to fileList?
Just keep track of how many files have been processed compared to how many files were given:
function getValidFileList(files, callback) {
  var count = files.length; // total number of files
  var fileList = [];        // accepted files

  //Get the selected files
  for(var i = 0; i < count; i++) { // invoke readers
    checkFile(files[i]);
  }

  function checkFile(file) {
    var reader = new FileReader();
    reader.onload = function(event) {
      var arrayBuffer = this.result;
      //Here I parse and check the data and if valid append it to fileList
      fileList.push(arrayBuffer); // or the original `file` blob..
      if (!--count) callback(fileList); // when done, invoke callback
    };
    reader.readAsArrayBuffer(file);
  }
}
The --count subtracts one per reader onload hit. When it reaches 0 (i.e. !count), the callback is invoked. Notice that the array order may not be the same as the order of files[n], if this should matter.
Then invoke it like this:
$( '#folder-select' ).on('change', function() {
  getValidFileList(this.files, onDone)
});

function onDone(fileList) {
  // continue from here
}
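If the original order matters, a small variation on the same counting idea (a sketch, assuming the validation step is otherwise unchanged) is to pass the index along and write into a fixed slot instead of pushing:

function getValidFileList(files, callback) {
  var count = files.length;
  var fileList = new Array(count); // pre-sized so results keep their slots

  for (var i = 0; i < files.length; i++) {
    checkFile(files[i], i);
  }

  function checkFile(file, index) {
    var reader = new FileReader();
    reader.onload = function () {
      // Store the result at the position matching files[index].
      fileList[index] = this.result;
      if (!--count) callback(fileList); // when done, invoke callback
    };
    reader.readAsArrayBuffer(file);
  }
}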
I want to read the data of a specific .xlsx file into JavaScript to show it on an .html page.
For that I downloaded SheetJS, an .xlsx parser.
But now I realized that you apparently must choose a file on the HTML page to read it. However, I only want to use one file the whole time, and the HTML page will only run locally. So is there any way to set up an absolute path to the file in the JS? Something like
file= "C:\test.xlsx"
Code example from the documentation:
function handleFile(e) {
  var files = e.target.files;
  var i, f;
  for (i = 0; i != files.length; ++i) {
    f = files[i]; // assign the current file inside the loop body
    var reader = new FileReader();
    var name = f.name;
    reader.onload = function(e) {
      var data = e.target.result;
      var workbook = XLSX.read(data, {type: 'binary'});
      /* DO SOMETHING WITH workbook HERE */
    };
    reader.readAsBinaryString(f);
  }
}
input_dom_element.addEventListener('change', handleFile, false);
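A browser will not let page scripts read an arbitrary local path like C:\test.xlsx directly. If the page and the workbook can be served from the same folder (for example via a small local web server), one hedged alternative to the file input is to fetch the workbook and hand the bytes to XLSX.read; the file name data.xlsx below is only a placeholder.

// Fetch a fixed workbook that sits next to the page instead of using an
// <input type="file">. Requires the page to be served over HTTP(S);
// fetch() generally cannot read file:// paths.
fetch('data.xlsx')
  .then(function (res) { return res.arrayBuffer(); })
  .then(function (buffer) {
    var workbook = XLSX.read(new Uint8Array(buffer), { type: 'array' });
    /* DO SOMETHING WITH workbook HERE */
  });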