HTML5 FileReader reading JavaScript files - javascript

I'm trying to allow users to drag and drop a folder containing JavaScript files into an html5 page. This is what I currently have:
// Angular-scope-backed list of dropped entries.
// NOTE(review): traverseFileTree pushes FileEntry objects (from
// webkitGetAsEntry), not File/Blob objects — see the parse problem below.
$scope.files = [];
//Establish dropzone
var dropbox;
dropbox = document.getElementById("fileDragAndDrop");
dropbox.addEventListener("dragenter", dragenter, false);
dropbox.addEventListener("dragover", dragover, false);
dropbox.addEventListener("drop", drop, false);
//Events
// Suppress the browser's default drag handling so the element accepts drops.
function dragenter(e) {
e.stopPropagation();
e.preventDefault();
};
function dragover(e) {
e.stopPropagation();
e.preventDefault();
};
// Walk every dropped item. webkitGetAsEntry() (Chrome) yields a FileEntry
// or DirectoryEntry, or null for non-filesystem items.
function drop(e) {
e.stopPropagation();
e.preventDefault();
var items = e.dataTransfer.items;
for (var i = 0, item; item = items[i]; i ++) {
var entry = item.webkitGetAsEntry();
if(entry) {
traverseFileTree(entry);
}
}
};
//recursive file walker
// Files are pushed into $scope.files inside $apply so Angular notices the
// change made from a native event handler; directories are expanded via
// their DirectoryEntry reader.
// NOTE(review): readEntries can deliver results in batches on some
// browsers, so very large directories may need repeated calls — confirm.
function traverseFileTree(item) {
if(item.isFile) {
$scope.$apply(function () {
$scope.files.push(item);
});
} else if (item.isDirectory) {
var dirReader = item.createReader();
dirReader.readEntries(function(entries) {
for (var i = 0; i < entries.length; i++) {
traverseFileTree(entries[i]);
}
});
}
};
So the dragging and dropping works, but I'm having problems reading the file content.
// Attempt to read every collected entry.
// NOTE(review): $scope.files holds FileEntry objects, not Blobs, so
// readAsBinaryString throws "The argument is not a Blob" (as the answer
// below explains); call entry.file(cb) first to obtain the File.
// NOTE(review): `fileReader` is var-scoped to the function, so every
// onload closure observes the reader created on the LAST iteration.
$scope.parse = function () {
for(var i = 0; i < $scope.files.length; i++) {
var fileReader = new FileReader();
fileReader.onload = function (e) {
console.log(fileReader.result);
};
fileReader.onerror = function(err) {
console.log(err);
};
// readAsBinaryString is deprecated; readAsText / readAsArrayBuffer are
// the modern alternatives.
fileReader.readAsBinaryString($scope.files[i]);
}
};
I am not getting any error messages, which makes it hard to debug. Am I doing something wrong? Has anyone had any issues doing similar tasks?

Not sure what your $scope is but giving it a go.
As you use webkitGetAsEntry() I assume this is for Chrome. From the looks of it your code should give you an error. If it does not, there is likely something you have omitted. You should typically get something like:
Uncaught TypeError: Failed to execute 'readAsBinaryString' on 'FileReader': The argument is not a Blob.
in your $scope.parse function.
There are a few issues. One is that you should probably read the files as text rather than as a binary string. Secondly, readAsBinaryString() is deprecated; use readAsArrayBuffer() if you want to read binary data.
Further, the webkitGetAsEntry() returns a FileEntry, hence why you should get the error mentioned above. To read the file you could typically do:
$scope.files[i].file(success_call_back, [error_call_back]);
For example:
// Reads the File behind a FileEntry and dumps its contents to the console.
// The entry's .file() callback hands over a standard File (a Blob), which
// is what FileReader actually accepts.
function my_parser(file) {
  var reader = new FileReader();
  reader.onerror = function (err) {
    console.log(err);
  };
  reader.onload = function (e) {
    console.log(reader.result);
  };
  console.log('Read', file);
  // Here you could (should) switch to another read operation
  // such as text or binary array
  reader.readAsBinaryString(file);
}
$scope.files[0].file(my_parser);
This will give you a File object as argument to my_parser(). Then you could typically check .type and use appropriate read function. (Though be aware of the slackness in MIME type. As in: do not rely on it, but use it as a hint.)
if (file.type.match(/application\/javascript|text\/.*/)) {
// Use text read
} else if (file.type.match(/^image\/.*/)) {
// Use binary read, read as dataURL etc.
} else ...

// Read every collected file as text and log its contents.
// FIX: the original logged the closed-over `fileReader`, which — being
// `var`-scoped to the enclosing function — always referred to the reader
// created on the LAST loop iteration, so every onload printed the same
// (often not-yet-loaded) result. Reading off the event target uses the
// reader that actually fired.
// NOTE(review): the entries pushed by traverseFileTree are FileEntry
// objects; convert each with entry.file(cb) before handing it to
// FileReader — confirm against the collection code above.
$scope.parse = function () {
  for (var i = 0; i < $scope.files.length; i++) {
    var fileReader = new FileReader();
    fileReader.onload = function (e) {
      console.log(e.target.result);
    };
    fileReader.onerror = function (err) {
      console.log(err);
    };
    fileReader.readAsText($scope.files[i]);
  }
};

The following works for me; it is written in TypeScript.
You should convert FileEntry To Standard File, before passing to FileReader.
// Converts an array of FileEntry objects to standard File objects by
// awaiting each entry's asynchronous .file() callback.
// FIX: the original pushed the *Promises* returned by readFile() into the
// result array (readFile is async), so callers received Promise[] instead
// of File[]. Promise.all awaits them so the resolved Files are returned.
// Returns undefined when `files` is falsy, matching the original contract.
const convertFileEntryToStandardFile = async (files: any[]) => {
  if (files) {
    return await Promise.all(files.map((f) => readFile(f)));
  }
};

// Wraps the callback-style FileEntry.file(success, error) API in a Promise.
const readFile = async (fileEntry: any) => {
  return await new Promise((resolve, reject) => fileEntry.file(resolve, reject));
};

Related

Async/await with FileReader issue

I have this code to read an Excel sheet into an array of objects. Everything seems to work fine except that reader.onload runs only after I try to access the data, which is not yet loaded (i.e., I suspect that reader.readAsArrayBuffer(file) did not have time to fire the .onload event — am I right?). The data is then loaded correctly, but the program stops at the end of loadFile(file) (i.e., after the resolve statement), seemingly because the call stack is empty.
To be complete, loadFile(file) is called by launchAll() which was called by a .onclick event.
I searched for similar but none reported such a program stop.
I cannot figure out what is going on!
// Entry point wired to the upload button's onclick.
// FIX 1: the original was missing the closing ')' on the loadFile call —
// a syntax error as pasted.
// FIX 2: loadFile is async and must be awaited, otherwise createEmptyTree
// and createTree run before mappedData has been populated (the behavior
// the question describes).
async function launchAll() {
  var files = document.getElementById('file_upload').files;
  if (files.length == 0) {
    alert("Please choose any file...");
    return;
  }
  await loadFile(files[0])
  createEmptyTree() // Creates forefather and foremother
  createTree() // Creates DAGs using mappedData (a global variable)
}
// Awaits the FileReader-backed Excel parse of `file`, logging success or
// failure. Errors are reported but swallowed, so this never rejects.
async function loadFile(file) {
  await readFileAsync(file).then(
    (fileLoaded) => console.log("File loaded !!", fileLoaded),
    (err) => console.log("Error during loading ", err)
  );
};
// Promisifies a FileReader read of `file`: parses the first sheet of the
// workbook into row objects, appends mapped nodes to the global
// mappedData, and resolves with the load event.
function readFileAsync(file) {
return new Promise((resolve, reject) => {
let reader = new FileReader()
reader.onload = (event) => {
var data = event.target.result;
// NOTE(review): the file is read as an ArrayBuffer below, but XLSX.read
// is told type 'binary' (which expects a binary string) — SheetJS
// normally wants type 'array' for ArrayBuffer input; confirm.
var workbook = XLSX.read(data, {
type: 'binary'
});
var roa = XLSX.utils.sheet_to_row_object_array(workbook.Sheets[workbook.SheetNames[0]]);
if (roa.length > 0) {
// NOTE(review): `i` has no var/let — it leaks as an implicit global.
for (i = 0; i < roa.length; i++) {
mappedData.push(mapNode(roa[i], i))
}
}
resolve(event)
}
reader.onerror = (error) => {
reject(error)
};
reader.readAsArrayBuffer(file)
})
}
You need to await loadFile:
// Add async
async function launchAll() {
// ...
// Await loadFile
await loadFile(files[0])
createEmptyTree() // Creates forefather and foremother
createTree() // Creates DAGs using mappedData (a global variable)
}

Nodejs: Loop Through File and Parse PDFs using Callback in Sync

I am new to node so am struggling quite a bit with the Async nature of it.
I am trying to create a script that will parse the pdfs inside a directory and output them in txt format in another directory.
To do this, I am using fs and pdf2json npm packages. I am passing the parseData function as a callback in the loopingFiles function. The only problem I am having is the async nature of node.
It will loop through all the files at the same time and the output is then a jumbled mess in the last file index.
I would like to process this synchronously such that it will wait once the data is finished parsing to write to the txt and then loop again.
I have tried promises but to no avail. Any help would be much appreciated!
var fs = require('fs'),
PDFParser = require("pdf2json");
// NOTE(review): a single shared parser instance — every parseData call
// piles another pair of .on() listeners onto it.
let pdfParser = new PDFParser(this,1);
// Parse one pdf and write its raw text to <index>.txt.
// NOTE(review): txtFile and pdfFile lack var/let, so they are implicit
// globals overwritten by each call; combined with the shared parser and
// the synchronous forEach below, all files parse concurrently and the
// output lands jumbled in the last index — exactly the symptom the
// question describes.
var parseData = function(pdf, index) {
txtFile = "/Users/janet/node/pdf/Destination/".concat(index.toString().concat(".txt"))
pdfFile = "/Users/janet/node/pdf/Source/".concat(pdf);
pdfParser.loadPDF(pdfFile);
// Parsing the pdf file in question
pdfParser.on("pdfParser_dataError", errData => console.error(errData.parserError) );
pdfParser.on("pdfParser_dataReady", pdfData => {
// NOTE(review): fs.writeFile without a completion callback is deprecated
// in newer Node versions.
fs.writeFile(txtFile, pdfParser.getRawTextContent());
});
};
// Kicks off one parse per directory entry — all at once, not sequentially.
var loopingFiles = function(callback) {
fs.readdir("/Users/janet/node/pdf/Source", function (err, files) {
if (err) {
console.log(err);
} else {
files.forEach( function(file, index) {
callback(file, index);
});
};
});
};
loopingFiles(parseData);
Something like this?
var fs = require("fs"),
  PDFParser = require("pdf2json");

// Sequentially converts pdfs[index..] to .txt files: each file's write
// completes before the next parse starts, preventing the interleaved
// output the shared-state version produced.
// FIX 1: a fresh PDFParser is created per file — the original reused one
// module-level instance and attached a new pair of .on() listeners to it
// on every call, so earlier handlers fired again for later files.
// FIX 2: txtFile/pdfFile are now declared locals (they leaked as implicit
// globals before).
var parseData = function(pdfs, index = 0) {
  // finished
  if (index >= pdfs.length) return;
  var pdf = pdfs[index];
  var txtFile = "/Users/janet/node/pdf/Destination/".concat(
    index.toString().concat(".txt")
  );
  var pdfFile = "/Users/janet/node/pdf/Source/".concat(pdf);
  var pdfParser = new PDFParser(this, 1);
  // Parsing the pdf file in question
  pdfParser.on("pdfParser_dataError", errData => {
    console.error(errData.parserError);
    // not sure if you want to call this here to keep going or not
    parseData(pdfs, index + 1);
  });
  pdfParser.on("pdfParser_dataReady", pdfData => {
    fs.writeFile(txtFile, pdfParser.getRawTextContent(), function() {
      // when we're all done, call this function again, with the index of the next pdf
      parseData(pdfs, index + 1);
    });
  });
  pdfParser.loadPDF(pdfFile);
};

// Lists the source directory once and hands the whole array to the
// callback, which walks it one entry at a time.
var loopingFiles = function(callback) {
  fs.readdir("/Users/janet/node/pdf/Source", function(err, files) {
    if (err) {
      console.log(err);
    } else {
      callback(files, 0);
    }
  });
};
loopingFiles(parseData);
the main difference is passing the whole array of pdfs to the function with an index, and only calling that function again with an incremented index once the current one is completed

Node.js - How to make a function wait to finish before continuing

I have a function that looks like this:
const XML = '.xml code';
var array = [];
var i = 0; //counter
// Collect each extracted 'loc' element and forward it to the imported
// function. NOTE(review): if XMLExtract invokes this callback
// asynchronously, the `throw` below cannot be caught by any surrounding
// try/catch, and successive function_name calls can overlap (the
// "overlaps" the question reports) — confirm XMLExtract's contract.
XMLExtract(XML, 'loc', false, (error, element) => {
if (error) {
throw new Error(error);
}
array[i] = element;
console.log(array[i]);
function_name(array[i]); //imported function from external .js
i++;
});
Basically I want to run function() to return the response that it gives and then run function() again with the new parameter. However, the above code doesn't work, it just overlaps.
I've also checked previous solutions: https://stackoverflow.com/a/5010339 but I think I don't really know how to implement it. Any suggestion?
UPDATE: external.js
// external.js — installs function_name on the receiver (intended to be
// invoked with `new` or .call so `this` is the target object).
module.exports = function() {
this.function_name = function() {
// Fire-and-forget async IIFE: no promise is returned to the caller, so
// completion cannot be awaited — which is why consecutive calls overlap.
// (body elided in the original post; the bare '...' is a placeholder)
(async function() {
...
await instance.exit();
}());
};
};

How to wait for asynchronous function result?

Following is my piece of code which read file from the path specified
for (var i in e.target.files) {
var reader = new FileReader();
reader.onload = function (e) {
alert("File loaded successfully");
var output = e.target.result;
// console.log("output: "+output);
}
reader.load is an asynchronous function; what I want is to wait until the reader.load event has fired before moving to the next iteration.
I also tried to force this with an infinite for loop, but my browser crashes on that. I have also tried the setTimeout and setInterval methods, but all in vain. I just want to wait until the reader.load event fires and then move to the next iteration.
JavaScript is built to be asynchronous. If low-level developer decided that some function need to be async, there is nothing you can do, and you should not, actually. Probably, it can take some time, and user will see his browser (or other runtime environment) hanged.
You should restructure the code, so you don't wait, but fire a callback for each asynchronous event, that would increment a counter, and do the function again. Something like:
var files = [],
    fileCount = 0,
    pending = [],
    currentFile = 0;
// Collect the File objects up front; for..in over a FileList also walks
// non-index properties, hence the hasOwnProperty guard.
for (var i in e.target.files) {
  if (e.target.files.hasOwnProperty(i)) {
    pending.push(e.target.files[i]);
    fileCount++;
  }
}
// Runs once every file has finished loading.
function allLoaded() {
  // process data.
}
// Records the finished reader, then chains to the next file.
function loadHandler(loadEvent) {
  files.push(loadEvent.target);
  loadNext();
}
// FIX 1: declared as a function declaration — the original was a *named
// function expression*, whose name is not visible outside itself, so the
// chained loadNext() call in loadHandler would have thrown a
// ReferenceError.
// FIX 2: the original never started a read, so onload could never fire
// and the chain stalled; readAsDataURL kicks off the asynchronous load
// of the current file.
function loadNext() {
  if (currentFile < fileCount) {
    var reader = new FileReader();
    reader.onload = loadHandler;
    reader.readAsDataURL(pending[currentFile]);
    currentFile++;
  } else {
    allLoaded();
  }
}
loadNext();
Assuming your code snippet is missing a call similar to reader.log(i) the solution is as follows:
// Sequential reader: handle e.target.files one at a time, starting the
// next read only from inside the previous read's onload callback.
var currentItemIndex = 0;
if (e.target.files.length > currentItemIndex) {
readNext(e.target.files[currentItemIndex]);
}
else {
endOfWait();
}
// Reads one item, then chains to the next (or finishes).
// NOTE(review): "reader.log" is not a real FileReader method — it echoes
// the question's placeholder; substitute readAsText / readAsDataURL /
// readAsArrayBuffer as appropriate.
function readNext(item) {
currentItemIndex ++;
var reader = new FileReader();
reader.onload = function (x) {
alert("File loaded successfully");
var output = x.target.result;
if (e.target.files.length > currentItemIndex) {
readNext(e.target.files[currentItemIndex]);
}
else {
endOfWait();
}
}
reader.log(item);
}
// Runs once, after every file has been processed.
function endOfWait() {
// put code here that executes once the wait is over
}

JavaScript Exception/Error Handling Not Working

This might be a little hard to follow.
I've got a function inside an object:
// Click handler for the file-open control: forwards the chosen input to
// DragDrop.FileChanged and reports storage failures to the user.
// NOTE(review): per the discussion below, localStorage quota errors are
// thrown later, inside FileReader onload callbacks — i.e. after this
// try/catch frame has already returned — so this catch never sees them.
f_openFRHandler: function(input) {
console.debug('f_openFRHandler');
try{
//throw 'foo';
DragDrop.FileChanged(input);
//foxyface.window.close();
}
catch(e){
console.error(e);
jQuery('#foxyface_open_errors').append('<div>Max local storage limit reached, unable to store new images in your browser. Please remove some images and try again.</div>');
}
},
inside the try block it calls:
// Reads every selected image file as a data URL and hands the result to
// the storage/upload managers. (Snippet as pasted is missing its closing
// "};" — the definition continues past the quoted excerpt.)
this.FileChanged = function(input) {
// FileUploadManager.addFileInput(input);
console.debug(input);
var files = input.files;
for (var i = 0; i < files.length; i++) {
var file = files[i];
// Skip anything that is not an image.
if (!file.type.match(/image.*/)) continue;
var reader = new FileReader();
// The IIFE pins `file` and the is-last flag per iteration; the returned
// closure runs asynchronously when the read completes — any throw inside
// it (e.g. localStorage quota) escapes the caller's try/catch.
reader.onload = (function(f, isLast) {
return function(e) {
if (files.length == 1) {
LocalStorageManager.addImage(f.name, e.target.result, false, true);
LocalStorageManager.loadCurrentImage();
//foxyface.window.close();
}
else {
FileUploadManager.addFileData(f, e.target.result); // add multiple files to list
if (isLast) setTimeout(function() { LocalStorageManager.loadCurrentImage() },100);
}
};
})(file, i == files.length - 1);
reader.readAsDataURL(file);
}
return true;
LocalStorageManager.addImage calls:
// Serialize the image collection and persist it under the 'ImageStore'
// localStorage key. NOTE: localStorage.setItem throws when the storage
// quota is exceeded; callers are expected to handle that.
this.setItem = function (imageData) {
  var payload = $.json_encode(imageData);
  localStorage.setItem('ImageStore', payload);
};
localStorage.setItem throws an error if too much local storage has been used. I want to catch that error in f_openFRHandler (first code sample), but it's being sent to the error console instead of the catch block. I tried the following code in my Firebug console to make sure I'm not crazy and it works as expected despite many levels of function nesting:
// Sanity check: a synchronous throw propagates up through nested call
// frames and is caught normally, no matter how deeply they nest.
try {
  var innermost = function () { throw 'foo'; };
  var wrapper = function () { innermost(); };
  wrapper();
} catch (e) {
  console.debug(e);
}
Any ideas?
var reader = new FileReader();
reader.onload = (function(f, isLast) {
Likely that's your problem right there - the FileReader probably calls onload asynchronously, at a time when your try/catch is no longer in scope. There may be a separate error handler function available on the FileReader interface, or you might need to move the try/catch into the anonymous function you're passing to onread().
I think the problem is that the error is happening later, after your f_openFRHandler function has completed. Note that the function where LocalStorageManager.addImage is being called is being set as the onload handler on the reader, not being called immediately. It gets called later, asynchronously, when the data is loaded.
You'll need to put your try..catch inside the anonymous function being created and assigned to that event, e.g.:
// Same FileChanged as above, but with the try/catch moved INSIDE the
// asynchronous onload closure — the only frame that still exists when
// localStorage throws its quota error.
this.FileChanged = function(input) {
// FileUploadManager.addFileInput(input);
console.debug(input);
var files = input.files;
for (var i = 0; i < files.length; i++) {
var file = files[i];
if (!file.type.match(/image.*/)) continue;
var reader = new FileReader();
// IIFE pins per-iteration values of `file` and the is-last flag.
reader.onload = (function(f, isLast) {
return function(e) {
try { // ADDED
if (files.length == 1) {
LocalStorageManager.addImage(f.name, e.target.result, false, true);
LocalStorageManager.loadCurrentImage();
//foxyface.window.close();
}
else {
FileUploadManager.addFileData(f, e.target.result); // add multiple files to list
if (isLast) setTimeout(function() { LocalStorageManager.loadCurrentImage() },100);
}
}
catch (err) { // ADDED
// YOUR HANDLING HERE
}
};
})(file, i == files.length - 1);
reader.readAsDataURL(file);
}
return true;
};
Your (excellent) test case makes the call synchronously, which is why the error is caught when you try it. This is a closer model to what's actually happening:
// Closer model of the real situation: setTimeout defers the throw until
// after the try/catch frame has exited, so the exception is NOT caught
// here — it surfaces as an uncaught error ~100ms later.
try{
(function(){
setTimeout(function(){ // Use setTimeout to model asynchronicity
throw 'foo'
}, 100);
})()
}
catch(e){
console.debug(e)
}

Categories

Resources