JavaScript: split a Blob as strings

Say you parse a text file with FileReader:

function show() {
    var file = document.getElementById('myFile');
    var data = file.files[0];
    var fileRead = new FileReader();
    fileRead.onload = function() {
        document.getElementById('out').appendChild(document.createTextNode(' ' + fileRead.result));
    };
    fileRead.readAsText(data);
}

How do you split a Blob object (raw data) with the split function, which works only on strings?
If I convert the Blob to a string first, readAsText reasonably complains that the data variable (now containing text) is not a Blob object.
So, basically, I want to use the split function on the Blob's text content.

You can just do it in the onload callback.

var file = document.getElementById('myFile');
var data = file.files[0];
var fileReader = new FileReader();
fileReader.onload = function() {
    let strings = fileReader.result.split(' ');
    strings.forEach(function(string) {
        // Your code here
    });
};
fileReader.readAsText(data);

If you want Blob objects representing each split string, you would have to build the Blob objects in the forEach loop.
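For example, a minimal sketch of what that could look like (assuming plain-text Blobs, one per split string):

fileReader.onload = function() {
    var blobs = [];
    fileReader.result.split(' ').forEach(function(s) {
        // Build one small text Blob per split string
        blobs.push(new Blob([s], { type: 'text/plain' }));
    });
    // Use the blobs array here
};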

Related

Taking an image and converting it into BLOB for MySQL

I am trying to write a JavaScript function that takes an image file and converts it into a BLOB (by converting the file into Base64 first and then into a Blob). My project doesn't have support for toBlob(), so I found different conversion steps and put them together, and they work up to the point where I have to pass the Blob from the function where it is created to the MySQL part of the code that handles communicating with the database (I have that part fully working). Now I only need to find a way to connect them through a variable that saves the result of the imageforQuery function.
My code so far is this:
let base64String = "";

function imageforQuery(imageid) {
    // takes the file and converts it to Base64
    var file = document.getElementById(imageid).files[0];
    var reader = new FileReader();
    console.log("next");
    imgFileFrontBlob = "";
    reader.onload = function () {
        base64String = reader.result.replace("data:", "").replace(/^.+,/, "");
        // console.log(base64String);
        base64String = 'data:image/jpeg;base64,' + base64String;
        console.log(base64String);
        // converts Base64 into a Blob
        var binary = atob(base64String.split(',')[1]);
        console.log(binary);
        var array = [];
        for (var i = 0; i < binary.length; i++) {
            array.push(binary.charCodeAt(i));
        }
        var imgFileFrontBlob = new Blob([new Uint8Array(array)], { type: 'image/png' });
        console.log(imgFileFrontBlob);
        return imgFileFrontBlob;
    };
    reader.readAsDataURL(file);
};
By experimenting with console.log() at different stages and with return, I have found out that I can't pass the converted Blob result out, because imageforQuery() only returns whatever comes after reader.readAsDataURL(file); and I don't know of a way of getting that result out.
––––––––––––––ADDITIONAL PROBLEMS I HAVE ENCOUNTERED––––––––––––––
Okay, so thanks to Emiel Zuurbier (thank you!) I have managed to rewrite my code with the help of his solution. However, as much as it helped one part of the problem, it didn't help with the JavaScript Blob object, as we found out it is not the exact same thing as a SQL BLOB.
Now the problem is that trying to send the Blob object in a SQL query results in just sending the literal text "[Blob object]".
But I am successfully using JavaScript to pull data from a BLOB field in my database and convert it into Base64 images in a different part of my application. The code for that is below:
var converterEngine = function (input) {
    // fn BLOB => Binary => Base64 ?
    var uInt8Array = new Uint8Array(input),
        i = uInt8Array.length;
    var biStr = []; // new Array(i);
    while (i--) {
        biStr[i] = String.fromCharCode(uInt8Array[i]);
    }
    var base64 = window.btoa(biStr.join(''));
    return base64;
};
What I need to do is just reverse this, and in theory it should give me the same data that I receive from the database.
My reversal code is below:
// This is the inner bit of code from the first problem (now solved); the
// typeOfData variable is passed into imageforQuery() as a second input
// parameter (in other words, it's not a concern here)
reader.onload = function () {
    let base64String = reader.result.replace("data:", "").replace(/^.+,/, "");
    base64String = "data:" + typeOfData + ";base64," + base64String;
    var binary = atob(base64String.split(",")[1]);
    // console.log(binary);
    var array = [];
    for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
    }
    var ourArray = new Uint8Array(array);
    resolve(ourArray);
};
However, as I mentioned, the data that comes out (ourArray) isn't actually identical to the original file from the BLOB in the database, so my code doesn't work correctly and I don't know why. Any ideas where I've made a mistake?
Base64 is simply ASCII text, so MySQL's BLOB or TEXT datatypes would both work. That is, after converting to Base64, don't worry about "convert to blob"; it is not necessary.
You can probably replace the code from //converts ... through return ... with simply
return base64String;
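For illustration, a minimal sketch of what that simplified onload could look like (saveToDB is a hypothetical stand-in for your existing MySQL code, which would receive the Base64 text):

reader.onload = function () {
    // Strip the "data:<mime>;base64," prefix and keep the bare Base64 payload,
    // which can be stored directly in a BLOB or TEXT column.
    var base64String = reader.result.replace(/^data:.+;base64,/, "");
    saveToDB(base64String); // hypothetical helper that runs the INSERT
};
reader.readAsDataURL(file);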
You can wrap the FileReader instance and calls inside of a Promise. Return the Promise immediately. In the reader.onload function call resolve() to exit the Promise with a value.
function imageforQuery(imageid) {
    return new Promise(resolve => {
        var file = document.getElementById(imageid).files[0];
        var reader = new FileReader();
        reader.onload = function () {
            let base64String = reader.result.replace("data:", "").replace(/^.+,/, "");
            base64String = "data:image/jpeg;base64," + base64String;
            var binary = atob(base64String.split(",")[1]);
            var array = [];
            for (var i = 0; i < binary.length; i++) {
                array.push(binary.charCodeAt(i));
            }
            var imgFileFrontBlob = new Blob([new Uint8Array(array)], {
                type: "image/png",
            });
            resolve(imgFileFrontBlob);
        };
        reader.readAsDataURL(file);
    });
}
This makes it possible to use your function as shown below: imageforQuery is called and returns a Promise. When the Promise is resolved (meaning resolve has been called), the function passed to the then method will run.
imageforQuery(imageId).then(imgFileFrontBlob => {
    // Use your blob here.
    saveToDB(imgFileFrontBlob); // Example of how you would use it.
});
Or use it with async / await.
(async () => {
    function imageforQuery(imageid) {
        ...
    }

    // Here we can wait for imageforQuery to finish and save the variable.
    const imgFileFrontBlob = await imageforQuery(imageId);
    saveToDB(imgFileFrontBlob); // Example of how you would use it.
})()

Reader onload function not adding split lines to predefined Array

I am trying to load a file using the input tag in HTML with type 'file':
<input type="file">
Then, with this file, I am trying to split each line and put the resulting lines into an array called lines.
I have tried moving console.log(lines) around, but I only get the correct result when the console.log is inside the .onload function.
var input = document.querySelector('input[type="file"]');
input.addEventListener('change', function (e) {
    let lines = new Array();
    console.log(input.files);
    const reader = new FileReader();
    reader.onload = function () {
        lines = reader.result.split('\n');
    };
    reader.readAsText(input.files[0]);
    console.log(lines);
});
How can I ensure that the lines array has had the correct split lines put into it, so that I can then use it in other parts of my overall function?
Just try pushing the split values into the array; it may work for you.
var input = document.querySelector('input[type="file"]');
let lines = [];
input.addEventListener('change', function (e) {
    const reader = new FileReader();
    reader.onload = function () {
        // Spread so lines receives the individual lines rather than a nested array
        lines.push(...reader.result.split('\n'));
    };
    reader.readAsText(input.files[0]);
    console.log(lines); // Note: this runs before onload fires, so it logs the still-empty array
});
<input type="file">
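If you need the populated array outside the callback, one option is a minimal sketch reusing the Promise-wrapping idea shown earlier (readLines and the then callback below are illustrative names, not part of the original code):

function readLines(file) {
    return new Promise(function (resolve, reject) {
        const reader = new FileReader();
        reader.onload = function () {
            resolve(reader.result.split('\n'));
        };
        reader.onerror = reject;
        reader.readAsText(file);
    });
}

input.addEventListener('change', function () {
    readLines(input.files[0]).then(function (lines) {
        // lines is guaranteed to be populated here
        console.log(lines);
    });
});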

Proper way to read a file using FileReader() to generate an md5 hash string from image files?

I'm currently doing this (see snippet below) to get an MD5 hash string for the image files I'm uploading (I'm using the hash as the file name):
NOTE: I'm using the md5 package to generate the hash (it's loaded into the snippet).
There are 4 available methods on FileReader() to read the files. They all seem to produce good results.
readAsText(file)
readAsBinaryString(file)
readAsArrayBuffer(file)
readAsDataURL(file)
Which one should I be using in this case, and why? Can you also explain the difference between them?
function onFileSelect(e) {
    const file = e.target.files[0];
    const reader1 = new FileReader();
    const reader2 = new FileReader();
    const reader3 = new FileReader();
    const reader4 = new FileReader();
    reader1.onload = (event) => {
        const fileContent = event.target.result;
        console.log('Hash from "readAsText()": ');
        console.log(md5(fileContent));
    };
    reader2.onload = (event) => {
        const fileContent = event.target.result;
        console.log('Hash from "readAsBinaryString()": ');
        console.log(md5(fileContent));
    };
    reader3.onload = (event) => {
        const fileContent = event.target.result;
        console.log('Hash from "readAsArrayBuffer()": ');
        console.log(md5(fileContent));
    };
    reader4.onload = (event) => {
        const fileContent = event.target.result;
        console.log('Hash from "readAsDataURL()": ');
        console.log(md5(fileContent));
    };
    reader1.readAsText(file);
    reader2.readAsBinaryString(file);
    reader3.readAsArrayBuffer(file);
    reader4.readAsDataURL(file);
}

.myDiv {
    margin-bottom: 10px;
}

<script src="https://cdn.jsdelivr.net/npm/js-md5@0.7.3/src/md5.min.js"></script>
<div class="myDiv">Pick an image file to see the 4 hash results on console.log()</div>
<input type='file' onChange="onFileSelect(event)" accept='.jpg,.jpeg,.png,.gif' />
Use readAsArrayBuffer.
readAsBinaryString() and readAsDataURL() will make your computer do a lot more work than needs to be done:
read the blob as a binary stream
convert it to a UTF-16 / base64 string (remember strings are immutable in JS, so any operation on them actually creates a copy in memory)
[ pass it to your lib ]
convert it back to a binary string
process the data
Also, it seems your library doesn't handle data URLs and fails on UTF-16 strings.
readAsText() by default will try to interpret your binary data as a UTF-8 text sequence, which is pretty bad for binary data like a raster image:
// generate some binary data
document.createElement('canvas').toBlob(blob => {
    const utf8_reader = new FileReader();
    const bin_reader = new FileReader();
    let done = 0;
    utf8_reader.onload = bin_reader.onload = e => {
        if (++done === 2) {
            console.log('same results: ', bin_reader.result === utf8_reader.result);
            console.log("utf8\n", utf8_reader.result);
            console.log("utf16\n", bin_reader.result);
        }
    };
    utf8_reader.readAsText(blob);
    bin_reader.readAsBinaryString(blob);
});
readAsArrayBuffer, on the other hand, will just allocate the binary data as-is in memory. Simple I/O, no processing.
To manipulate this data, we can use TypedArray views over this binary data, which, being only views, won't create any overhead either.
And if you look at the library you are using, it will pass your input to such a Uint8Array anyway to process it further. However, beware that it apparently needs you to pass a Uint8Array view of the ArrayBuffer rather than the raw ArrayBuffer directly.
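Putting that together, a minimal sketch of the readAsArrayBuffer path inside the question's onFileSelect handler (assuming the same md5 package loaded in the snippet above):

function onFileSelect(e) {
    const file = e.target.files[0];
    const reader = new FileReader();
    reader.onload = (event) => {
        const buffer = event.target.result;        // ArrayBuffer with the raw bytes
        console.log(md5(new Uint8Array(buffer)));  // pass a Uint8Array view, not the bare ArrayBuffer
    };
    reader.readAsArrayBuffer(file);
}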

Convert binary file to JavaScript string and then to Uint8Array

I'm trying to create a web application that can be used via a file:// URI. This means that I can't use AJAX to load binary files (without turning off security features in the browser, which I don't want to do as a matter of principle).
The application uses a SQLite database. I want to provide the database to a sql.js constructor, which requires it in Uint8Array format.
Since I can't use AJAX to load the database file, I could instead load it with <input type="file"> and FileReader.prototype.readAsArrayBuffer and convert the ArrayBuffer to a Uint8Array. And that's working with the following code:
input.addEventListener('change', function (changeEvent) {
    var file = changeEvent.currentTarget.files[0];
    var reader = new FileReader();
    reader.addEventListener('load', function (loadEvent) {
        var buffer = loadEvent.target.result;
        var uint8Array = new Uint8Array(buffer);
        var db = new sql.Database(uint8Array);
    });
    reader.readAsArrayBuffer(file);
});
However, <input type="file"> requires user interaction, which is tedious.
I thought I might be able to work around the no-AJAX limitation by using a build tool to convert my database file to a JavaScript object / string and generate a ".js" file providing the file contents, and then convert the file contents to a Uint8Array, somehow.
Pseudocode:
// In Node.js:
var fs = require('fs');
var sqliteDb = fs.readFileSync('path/to/sqlite.db');
var string = convertBufferToJsStringSomehow(sqliteDb);
fs.writeFileSync('build/db.js', 'var dbString = "' + string + '";');
// In the browser (assume "build/db.js" has been loaded via a <script> tag):
var uint8Array = convertStringToUint8ArraySomehow(dbString);
var db = new sql.Database(uint8Array);
In Node.js, I've tried the following:
var TextEncoder = require('text-encoding').TextEncoder;
var TextDecoder = require('text-encoding').TextDecoder;
var sql = require('sql.js');

var string = new TextDecoder('utf-8').decode(fs.readFileSync('path/to/sqlite.db'));
// At this point, I would write `string` to a ".js" file, but for
// the sake of determining whether converting back to a Uint8Array
// would work, I'll continue in Node.js...
var uint8array = new TextEncoder().encode(string);
var db = new sql.Database(uint8array);
db.exec('SELECT * FROM tablename');
But when I do that, I get the error "Error: database disk image is malformed".
What am I doing wrong? Is this even possible? The database disk image isn't "malformed" when I load the same file via FileReader.
Using the following code, I was able to transfer the database file's contents to the browser:
// In Node.js:
var fs = require('fs');
var base64 = fs.readFileSync('path/to/sqlite.db', 'base64');
fs.writeFileSync('build/db.js', 'var dbString = "' + base64 + '";');

// In the browser (assume "build/db.js" has been loaded via a <script> tag):
function base64ToUint8Array (string) {
    var raw = atob(string);
    var rawLength = raw.length;
    var array = new Uint8Array(new ArrayBuffer(rawLength));
    for (var i = 0; i < rawLength; i += 1) {
        array[i] = raw.charCodeAt(i);
    }
    return array;
}

var db = new sql.Database(base64ToUint8Array(dbString));
console.log(db.exec('SELECT * FROM tablename'));
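As an aside, a quick sketch of why the earlier TextDecoder/TextEncoder round trip corrupted the data while Base64 does not: arbitrary bytes are not valid UTF-8, so the decoder replaces invalid sequences and the original bytes cannot be recovered.

// Node.js sketch: round-tripping arbitrary bytes through UTF-8 text is lossy
const bytes = Uint8Array.from([0xff, 0xfe, 0x00, 0x41]);  // 0xff/0xfe are invalid UTF-8 lead bytes
const text = new TextDecoder('utf-8').decode(bytes);      // invalid bytes become U+FFFD
const back = new TextEncoder().encode(text);
console.log(bytes.length, back.length);                   // 4 vs 8: the data was mangled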
However, <input type="file"> requires user interaction, which is tedious.
Using the current working approach would be less tedious than attempting to create workarounds. If the user intends to use the application, they can select the file from their filesystem to run it.

HTML5 FileReader + WebSQL

Is it possible to parse a CSV file with FileReader and write it to a WebSQL table?
The FileReader.readAsText() method will give you a string from the file, which you can then split() to get the lines and CSV cells. Check out readAsText() for more details and try pasting the following into the interactive example:
<script id='csv' type='text/plain'>
apple,1,$1.00
banana,4,$0.20
orange,3,$0.79
</script>
<script>
// Use a Blob to simulate a File
var csv = document.getElementById('csv').textContent.trim();
var file = new Blob([csv]);
var reader = new FileReader();
reader.onload = function(event) {
    var reader = event.target;
    var text = reader.result;
    var lines = text.split('\n');
    lines.forEach(function(line) {
        var parts = line.split(',');
        // process the cells in the csv
        console.log(parts[0], parts[1], parts[2]);
    });
};
reader.readAsText(file);
</script>
Yes, that should not be a problem at all :)
Just use FileReader.readAsText() to grab the csv file content, and from there it should be a breeze
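To finish the job, here is a minimal sketch of writing the parsed rows into a WebSQL table (WebSQL is deprecated, but this is the classic openDatabase API; the database name and the fruits table/columns are made up for illustration):

var db = openDatabase('csvdemo', '1.0', 'CSV demo', 2 * 1024 * 1024);
var reader = new FileReader();
reader.onload = function (event) {
    var lines = event.target.result.split('\n');
    db.transaction(function (tx) {
        tx.executeSql('CREATE TABLE IF NOT EXISTS fruits (name, qty, price)');
        lines.forEach(function (line) {
            var parts = line.split(',');
            // one INSERT per CSV row
            tx.executeSql('INSERT INTO fruits (name, qty, price) VALUES (?, ?, ?)',
                          [parts[0], parts[1], parts[2]]);
        });
    });
};
reader.readAsText(file); // file is the File/Blob selected or simulated above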
With Screw-FileReader
// Use a Blob to simulate a File
let blob = new Blob([
`apple,1,$1.00
banana,4,$0.20
orange,3,$0.79`
])

blob.text().then(text => {
    var lines = text.split('\n')
    for (let line of lines) {
        let parts = line.split(',')
        // process the cells in the csv
        console.log(parts)
    }
})
<script src="https://cdn.rawgit.com/jimmywarting/Screw-FileReader/master/index.js"></script>
