Convert binary file to JavaScript string and then to Uint8Array - javascript

I'm trying to create a web application that can be used via a file:// URI. This means that I can't use AJAX to load binary files (without turning off security features in the browser, which I don't want to do as a matter of principle).
The application uses a SQLite database. I want to provide the database to a sql.js constructor, which requires it in Uint8Array format.
Since I can't use AJAX to load the database file, I could instead load it with <input type="file"> and FileReader.prototype.readAsArrayBuffer and convert the ArrayBuffer to a Uint8Array. And that's working with the following code:
input.addEventListener('change', function (changeEvent) {
  var file = changeEvent.currentTarget.files[0];
  var reader = new FileReader();
  reader.addEventListener('load', function (loadEvent) {
    var buffer = loadEvent.target.result;
    var uint8Array = new Uint8Array(buffer);
    var db = new sql.Database(uint8Array);
  });
  reader.readAsArrayBuffer(file);
});
However, <input type="file"> requires user interaction, which is tedious.
I thought I might be able to work around the no-AJAX limitation by using a build tool to convert my database file to a JavaScript object / string and generate a ".js" file providing the file contents, and then convert the file contents to a Uint8Array, somehow.
Pseudocode:
// In Node.js:
var fs = require('fs');
var sqliteDb = fs.readFileSync('path/to/sqlite.db');
var string = convertBufferToJsStringSomehow(sqliteDb);
fs.writeFileSync('build/db.js', 'var dbString = "' + string + '";');
// In the browser (assume "build/db.js" has been loaded via a <script> tag):
var uint8Array = convertStringToUint8ArraySomehow(dbString);
var db = new sql.Database(uint8Array);
In Node.js, I've tried the following:
var fs = require('fs');
var TextEncoder = require('text-encoding').TextEncoder;
var TextDecoder = require('text-encoding').TextDecoder;
var sql = require('sql.js');
var string = new TextDecoder('utf-8').decode(fs.readFileSync('path/to/sqlite.db'));
// At this point, I would write `string` to a ".js" file, but for
// the sake of determining if converting back to a Uint8Array
// would work, I'll continue in Node.js...
var uint8array = new TextEncoder().encode(string);
var db = new sql.Database(uint8array);
db.exec('SELECT * FROM tablename');
But when I do that, I get the error "Error: database disk image is malformed".
What am I doing wrong? Is this even possible? The database disk image isn't "malformed" when I load the same file via FileReader.

Using the following code, I was able to transfer the database file's contents to the browser:
// In Node.js:
var fs = require('fs');
var base64 = fs.readFileSync('path/to/sqlite.db', 'base64');
fs.writeFileSync('build/db.js', 'var dbString = "' + base64 + '";');
// In the browser (assume "build/db.js" has been loaded via a <script> tag):
function base64ToUint8Array(string) {
  var raw = atob(string);
  var rawLength = raw.length;
  var array = new Uint8Array(new ArrayBuffer(rawLength));
  for (var i = 0; i < rawLength; i += 1) {
    array[i] = raw.charCodeAt(i);
  }
  return array;
}
var db = new sql.Database(base64ToUint8Array(dbString));
console.log(db.exec('SELECT * FROM tablename'));
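This works because Base64 maps every byte to a plain ASCII character and back, losslessly. The earlier TextDecoder/TextEncoder attempt failed because arbitrary binary data is usually not valid UTF-8: the decoder replaces invalid byte sequences with U+FFFD, so re-encoding produces different bytes and SQLite rejects the corrupted image. A minimal Node.js sketch demonstrating the loss (the byte values are just an illustration):
var TextEncoder = require('text-encoding').TextEncoder;
var TextDecoder = require('text-encoding').TextDecoder;

// 0x89 is not a valid UTF-8 lead byte, so decoding replaces it with U+FFFD
var bytes = new Uint8Array([0x89, 0x50, 0x4e, 0x47]);
var decoded = new TextDecoder('utf-8').decode(bytes);  // "\uFFFDPNG"
var reencoded = new TextEncoder().encode(decoded);
console.log(reencoded.length); // 6, not 4: U+FFFD re-encodes as three bytes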

And that's working with the following code:
input.addEventListener('change', function (changeEvent) {
  var file = changeEvent.currentTarget.files[0];
  var reader = new FileReader();
  reader.addEventListener('load', function (loadEvent) {
    var buffer = loadEvent.target.result;
    var uint8Array = new Uint8Array(buffer);
    var db = new sql.Database(uint8Array);
  });
  reader.readAsArrayBuffer(file);
});
However, <input type="file"> requires user interaction, which is tedious.
Using the current working approach would be less tedious than attempting to create workarounds. If the user intends to use the application, they can select the file from their filesystem to run it.

Related

Taking an image and converting it into BLOB for MySQL

I am trying to make a JavaScript function that takes an image file and converts it into a BLOB (by converting the file into Base64 first and then into a BLOB). My project doesn't have support for toBlob(), so I have found different conversion steps and put them together, and they work up to the point where I have to pass the BLOB from the function where it's made to the MySQL part of the code that takes care of communicating with the database (I have that fully working). Now I only need to find a way to connect them through a variable that saves the result of the imageforQuery function.
My code so far is this:
let base64String = "";
function imageforQuery(imageid) {
  // takes file and converts to Base64
  var file = document.getElementById(imageid).files[0];
  var reader = new FileReader();
  console.log("next");
  imgFileFrontBlob = "";
  reader.onload = function () {
    base64String = reader.result.replace("data:", "")
      .replace(/^.+,/, "");
    // console.log(base64String);
    base64String = 'data:image/jpeg;base64,' + base64String;
    console.log(base64String);
    // converts Base64 into BLOB
    var binary = atob(base64String.split(',')[1]);
    console.log(binary);
    var array = [];
    for (var i = 0; i < binary.length; i++) {
      array.push(binary.charCodeAt(i));
    }
    var imgFileFrontBlob = new Blob([new Uint8Array(array)], {type: 'image/png'});
    console.log(imgFileFrontBlob);
    return imgFileFrontBlob;
  };
  reader.readAsDataURL(file);
}
By experimenting with console.log() and return at different stages, I have found that I can't pass the converted BLOB result out: imageforQuery() returns right after reader.readAsDataURL(file) starts the asynchronous read, so the value returned inside reader.onload goes nowhere, and I don't know of a way of getting that result out.
––––––––––––––ADDITIONAL PROBLEMS I HAVE ENCOUNTERED––––––––––––––
Okay, so thanks to Emiel Zuurbier (Thank you!) I have managed to rewrite my code with the help of his solution. However, as much as it helped with one part of the problem, it didn't help with the JavaScript Blob object, as we found out it is not the exact same thing as a SQL BLOB.
Now the problem is that upon trying to send the Blob object in a SQL query, it resulted in just sending the literal text "[object Blob]".
But I am using JavaScript successfully to pull the data from a BLOB field from my database and convert it into Base64 images from that data that was stored in the BLOB in a different part of my application. The code for that is below:
var converterEngine = function (input) {
  // fn BLOB => Binary => Base64 ?
  var uInt8Array = new Uint8Array(input),
      i = uInt8Array.length;
  var biStr = []; // new Array(i);
  while (i--) {
    biStr[i] = String.fromCharCode(uInt8Array[i]);
  }
  var base64 = window.btoa(biStr.join(''));
  return base64;
};
What I need to do is just reverse this and in theory, it should get me the same data that I receive from the database.
My reversal code is below:
// this is the inside bit of code from the first problem that is solved; the
// typeOfData variable is passed into imageforQuery() as a second input
// variable (in other words, it's not a concern here)
reader.onload = function () {
  let base64String = reader.result.replace("data:", "").replace(/^.+,/, "");
  base64String = "data:" + typeOfData + ";base64," + base64String;
  var binary = atob(base64String.split(",")[1]);
  // console.log(binary);
  var array = [];
  for (var i = 0; i < binary.length; i++) {
    array.push(binary.charCodeAt(i));
  }
  var ourArray = new Uint8Array(array);
  resolve(ourArray);
};
However, as I mentioned, the data that comes out (ourArray) isn't actually identical to the original file from the BLOB in the database, so my code doesn't function correctly and I don't know why. Any ideas where I've made a mistake?
Base64 is simply ASCII text, so MySQL's BLOB or TEXT datatypes would both work. That is, after converting to Base64, don't worry about "convert to blob"; it is not necessary.
That is, you can probably replace the code from //converts ... through return ... by simply
return base64String;
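A minimal sketch of that simplification, combined with the Promise wrapper from the next answer (names follow the question's code; whether the server wants the full data URL or only the raw Base64 payload is an assumption to check):
function imageforQuery(imageid) {
  return new Promise(function (resolve) {
    var file = document.getElementById(imageid).files[0];
    var reader = new FileReader();
    reader.onload = function () {
      // resolve with the Base64 payload only; keep reader.result instead
      // if the server expects the full "data:...;base64," URL
      resolve(reader.result.replace(/^.+,/, ""));
    };
    reader.readAsDataURL(file);
  });
}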
You can wrap the FileReader instance and its calls inside of a Promise and return the Promise immediately. In the reader.onload handler, call resolve() to fulfill the Promise with a value.
function imageforQuery(imageid) {
  return new Promise(resolve => {
    var file = document.getElementById(imageid).files[0];
    var reader = new FileReader();
    reader.onload = function () {
      let base64String = reader.result.replace("data:", "").replace(/^.+,/, "");
      base64String = "data:image/jpeg;base64," + base64String;
      var binary = atob(base64String.split(",")[1]);
      var array = [];
      for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
      }
      var imgFileFrontBlob = new Blob([new Uint8Array(array)], {
        type: "image/png",
      });
      resolve(imgFileFrontBlob);
    };
    reader.readAsDataURL(file);
  });
}
This lets you use your function as shown below. imageforQuery is called and returns a Promise. When the promise is fulfilled (meaning resolve has been called), the function in the then method will run.
imageforQuery(imageId).then(imgFileFrontBlob => {
  // Use your blob here.
  saveToDB(imgFileFrontBlob); // Example of how you would use it.
});
Or use it with async / await.
(async () => {
  function imageforQuery(imageid) {
    ...
  }
  // Here we can wait for imageforQuery to finish and save the variable.
  const imgFileFrontBlob = await imageforQuery(imageId);
  saveToDB(imgFileFrontBlob); // Example of how you would use it.
})();
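One caveat worth noting (my addition, not part of the original answer): the Promise above never rejects, so a failed read would hang the await forever. A generic helper sketch that also wires up reader.onerror (readAsDataURLAsync is a hypothetical name):
function readAsDataURLAsync(file) {
  return new Promise(function (resolve, reject) {
    var reader = new FileReader();
    reader.onload = function () { resolve(reader.result); };
    reader.onerror = function () { reject(reader.error); };
    reader.readAsDataURL(file);
  });
}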

Javascript formdata: encrypt files before appending

I need to modify existing frontend (angular) code that involves uploading files to a server. Now the files need to be encrypted before being uploaded.
The current approach uses FormData to append a number of files and send them in a single request as shown below:
function uploadFiles(wrappers) {
  var data = new FormData();
  // Add each file
  for (var i = 0; i < wrappers.length; i++) {
    var wrapper = wrappers[i];
    var file = wrapper.file;
    data.append('file_' + i, file);
  }
  $http.post(uri, data, requestCfg).then(
    /* ... */
  );
}
I have been using Forge in other projects, but never in this sort of context and don't really see how to encrypt files on the fly and still append them as FormData contents.
Forge provides an easy API:
var key = forge.random.getBytesSync(16);
var iv = forge.random.getBytesSync(8);
// encrypt some bytes
var cipher = forge.rc2.createEncryptionCipher(key);
cipher.start(iv);
cipher.update(forge.util.createBuffer(someBytes));
cipher.finish();
var encrypted = cipher.output;
The backend receives files using Formidable, and all the file handling is already wired. I would thus like to stick to the existing front-end logic and simply insert the encryption logic; that is, it's not the entire FormData that must be encrypted. I haven't found a good lead yet to approach this.
Suggestions are very welcome!
Ok, found a solution and added the decrypt code as well. This adds a layer of async code.
function appendFile(aFile, idx) {
  // Encrypt if a key was provided for this protocol test
  if (!key) {
    data.append('dicomfile_' + idx, file);
    appendedCount++;
    onFileAppended();
  }
  else {
    var reader = new FileReader();
    reader.onload = function () {
      // 1. Read bytes
      var arrayBuffer = reader.result;
      var bytes = new Uint8Array(arrayBuffer); // byte array aka uint8
      // 2. Encrypt
      var cipher = forge.cipher.createCipher('AES-CBC', key);
      cipher.start({iv: iv});
      cipher.update(forge.util.createBuffer(bytes));
      cipher.finish();
      // 3. To blob (file extends blob)
      var encryptedByteCharacters = cipher.output.getBytes(); // similar to an atob(b64) output
      // var asB64 = forge.util.encode64(encryptedBytes);
      // var encryptedByteCharacters = atob(asB64);
      // Convert to Blob object
      var blob = byteCharsToBlob(encryptedByteCharacters, "application/octet-stream", 512);
      // 4. Append blob
      data.append('dicomfile_' + idx, blob, file.name);
      // Decrypt for the sake of testing
      if (true) {
        var fileReader = new FileReader();
        fileReader.onload = function () {
          arrayBuffer = this.result;
          var bytez = new Uint8Array(arrayBuffer);
          var decipher = forge.cipher.createDecipher('AES-CBC', key);
          decipher.start({iv: iv});
          decipher.update(forge.util.createBuffer(bytez));
          decipher.finish();
          var decryptedByteCharacters = decipher.output.getBytes();
          var truz = bytes === decryptedByteCharacters;
          var blob = byteCharsToBlob(decryptedByteCharacters, "application/octet-stream", 512);
          data.append('decrypted_' + idx, blob, file.name + '.decrypted');
          appendedCount++;
          onFileAppended();
        };
        fileReader.readAsArrayBuffer(blob);
      }
      else {
        // z. Resume processing
        appendedCount++;
        onFileAppended();
      }
    };
    // Read file
    reader.readAsArrayBuffer(aFile);
  }
}

function onFileAppended() {
  // Only proceed when all files were appended and optionally encrypted (async)
  if (appendedCount !== wrappers.length) return;
  /* resume processing, upload or do whatever */
}
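For completeness, a hedged sketch of the matching decryption on a Node.js backend with node-forge (how the key and iv reach the server is an assumption here; transporting them securely is out of scope):
// Node.js sketch; `key` and `iv` must be the same binary strings used in the browser
var forge = require('node-forge');
var fs = require('fs');

function decryptUpload(encryptedPath, key, iv) {
  var encryptedBytes = fs.readFileSync(encryptedPath, 'binary');
  var decipher = forge.cipher.createDecipher('AES-CBC', key);
  decipher.start({iv: iv});
  decipher.update(forge.util.createBuffer(encryptedBytes));
  decipher.finish();
  // getBytes() returns a binary string; convert it back to a Buffer to write it out
  fs.writeFileSync(encryptedPath + '.decrypted',
                   Buffer.from(decipher.output.getBytes(), 'binary'));
}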

How to checksum the file to be uploaded with javascript?

I want to checksum the files at browser side before uploading, then checksum and compare at server side to make sure the consistent. But how can I get the pure binary data of the file and checksum it? I tried the way below, but doesn't work:
let fileSelect = document.getElementById('file')
let files = fileSelect.files
let file = files[0]
var r = new FileReader();
r.onload = function(){ console.log(r.result); };
r.readAsArrayBuffer(file);
var file_sha1 = sha1(r.result)
The library you are using seems to support string input only. Find a library that supports binary input, e.g. js-sha1. You should also hash inside the onload callback: readAsArrayBuffer is asynchronous, so in your code sha1(r.result) runs before the file has been read and r.result is still null.
var reader = new FileReader();
reader.onload = function (event) {
  var file_sha1 = sha1(event.target.result);
};
reader.readAsArrayBuffer(file);
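Alternatively (my addition, not from the original answer), modern browsers can hash an ArrayBuffer natively with the Web Crypto API; note that crypto.subtle is only available in secure contexts and returns a Promise:
var reader = new FileReader();
reader.onload = function (event) {
  crypto.subtle.digest('SHA-1', event.target.result).then(function (digest) {
    // Convert the ArrayBuffer digest to a hex string
    var hex = Array.prototype.map.call(new Uint8Array(digest), function (b) {
      return ('0' + b.toString(16)).slice(-2);
    }).join('');
    console.log(hex);
  });
};
reader.readAsArrayBuffer(file);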

Excel to JSON javascript code?

I want to convert excel sheet data to json. It has to be dynamic, so there is an upload button where user uploads the excel sheet and the data is then converted into json. Could you please provide me the javascript code? I tried SheetJS but couldn't figure out. I would prefer something straight forward :)
I really appreciate your help!
NOTE: Not 100% cross-browser.
Check browser compatibility at http://caniuse.com/#search=FileReader
As you will see, people have had issues with the less common browsers, but this could come down to the version of the browser. I always recommend using something like caniuse to see what generation of browser is supported. This is only a working answer for the user, not final copy-and-paste code for people to just use.
The Fiddle: http://jsfiddle.net/d2atnbrt/3/
THE HTML CODE:
<input type="file" id="my_file_input" />
<div id='my_file_output'></div>
THE JS CODE:
var oFileIn;
$(function() {
  oFileIn = document.getElementById('my_file_input');
  if (oFileIn.addEventListener) {
    oFileIn.addEventListener('change', filePicked, false);
  }
});

function filePicked(oEvent) {
  // Get the file from the input
  var oFile = oEvent.target.files[0];
  var sFilename = oFile.name;
  // Create an HTML5 FileReader
  var reader = new FileReader();
  // Ready the event for when a file gets selected
  reader.onload = function(e) {
    var data = e.target.result;
    var cfb = XLS.CFB.read(data, {type: 'binary'});
    var wb = XLS.parse_xlscfb(cfb);
    // Loop over each sheet
    wb.SheetNames.forEach(function(sheetName) {
      // Obtain the current sheet as CSV
      var sCSV = XLS.utils.make_csv(wb.Sheets[sheetName]);
      var oJS = XLS.utils.sheet_to_row_object_array(wb.Sheets[sheetName]);
      $("#my_file_output").html(sCSV);
      console.log(oJS);
    });
  };
  // Tell JS to start reading the file. You could delay this if desired.
  reader.readAsBinaryString(oFile);
}
This also requires https://cdnjs.cloudflare.com/ajax/libs/xls/0.7.4-a/xls.js to convert the file to a readable format. I've used jQuery only for changing the div contents and for the DOM-ready event, so jQuery is not needed.
This is as basic as I could get it.
EDIT - Generating A Table
The Fiddle: http://jsfiddle.net/d2atnbrt/5/
This second fiddle shows an example of generating your own table; the key here is using sheet_to_json to get the data in the correct format for JS use.
One or two comments in the second fiddle might be incorrect, since it is a modified version of the first fiddle; the CSV comment, at least, is.
Test XLS File: http://www.whitehouse.gov/sites/default/files/omb/budget/fy2014/assets/receipts.xls
This does not cover XLSX files, though; it should be fairly easy to adjust for them using their examples.
js-xlsx library makes it easy to convert Excel/CSV files into JSON objects.
Download the xlsx.full.min.js file from here.
Write the below code on your HTML page.
Edit the referenced js file link (xlsx.full.min.js) and the link of the Excel file.
<!doctype html>
<html>
<head>
  <title>Excel to JSON Demo</title>
  <script src="xlsx.full.min.js"></script>
</head>
<body>
  <script>
    /* set up XMLHttpRequest */
    var url = "http://myclassbook.org/wp-content/uploads/2017/12/Test.xlsx";
    var oReq = new XMLHttpRequest();
    oReq.open("GET", url, true);
    oReq.responseType = "arraybuffer";
    oReq.onload = function(e) {
      var arraybuffer = oReq.response;
      /* convert data to binary string */
      var data = new Uint8Array(arraybuffer);
      var arr = new Array();
      for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
      var bstr = arr.join("");
      /* Call XLSX */
      var workbook = XLSX.read(bstr, {
        type: "binary"
      });
      /* DO SOMETHING WITH workbook HERE */
      var first_sheet_name = workbook.SheetNames[0];
      /* Get worksheet */
      var worksheet = workbook.Sheets[first_sheet_name];
      console.log(XLSX.utils.sheet_to_json(worksheet, {
        raw: true
      }));
    };
    oReq.send();
  </script>
</body>
</html>
The answers above work fine with the xls format but, in my case, didn't work for the xlsx format. Thus I added some code here; it works for both the xls and xlsx formats.
I took the sample from the official sample link.
Hope it may help!
function fileReader(oEvent) {
  var oFile = oEvent.target.files[0];
  var sFilename = oFile.name;
  var reader = new FileReader();
  reader.onload = function (e) {
    var data = e.target.result;
    data = new Uint8Array(data);
    var workbook = XLSX.read(data, {type: 'array'});
    console.log(workbook);
    var result = {};
    workbook.SheetNames.forEach(function (sheetName) {
      var roa = XLSX.utils.sheet_to_json(workbook.Sheets[sheetName], {header: 1});
      if (roa.length) result[sheetName] = roa;
    });
    // see the result; caution: this only runs after the reader's load event fires
    console.log(result);
  };
  reader.readAsArrayBuffer(oFile);
}

// Add your id of "File Input"
$('#fileUpload').change(function (ev) {
  fileReader(ev);
});
@Kwang-Chun Kang Thanks a lot! I found the solution is working and very helpful; it really saved my day.
For me, I am trying to create a React.js component that converts *.xlsx to a JSON object when the user uploads the Excel file to an HTML input tag.
First I need to install XLSX package with:
npm install xlsx --save
Then in my component code, import with:
import XLSX from 'xlsx'
The component UI should look like this:
<input
accept=".xlsx"
type="file"
onChange={this.fileReader}
/>
It calls a function fileReader(), which is exactly the same as the solution provided above.
To learn more about fileReader API, I found this blog to be helpful:
https://blog.teamtreehouse.com/reading-files-using-the-html5-filereader-api
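A minimal sketch of what that handler might look like inside the component (my addition; the XLSX calls mirror the previous answer, and the state shape is hypothetical):
fileReader = (event) => {
  const file = event.target.files[0];
  const reader = new FileReader();
  reader.onload = (e) => {
    // Parse the ArrayBuffer directly; no binary-string conversion needed
    const workbook = XLSX.read(new Uint8Array(e.target.result), { type: 'array' });
    const firstSheet = workbook.Sheets[workbook.SheetNames[0]];
    const json = XLSX.utils.sheet_to_json(firstSheet, { raw: true });
    this.setState({ rows: json }); // hypothetical state shape
  };
  reader.readAsArrayBuffer(file);
};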
This is my expansion on https://stackoverflow.com/a/52237535/5079799
While it was a working, great example, I'm not using an input form but fetching from a URL, and I have other things to do after fetching the workbook, so I needed to wrap the onload into a promise (see: adjust onload function to be used with async/await).
Here is what I ended up with:
async function Outside_Test() {
  var reso = await Get_JSON();
  console.log('reso_out');
  console.log(reso);
}

async function Get_JSON() {
  var url = "http://MyworkbookURL";
  var workbook = await Get_XLSX_As_Workbook_From_URL(url);
  /* DO SOMETHING WITH workbook HERE */
  var first_sheet_name = workbook.SheetNames[0];
  /* Get worksheet */
  var worksheet = workbook.Sheets[first_sheet_name];
  var reso = XLSX.utils.sheet_to_json(worksheet, {
    raw: true
  });
  return reso;
}

async function Get_XLSX_As_Workbook_From_URL(url) {
  const arrayBuffer = await new Promise((resolve, reject) => {
    var oReq = new XMLHttpRequest();
    oReq.open("GET", url, true);
    oReq.responseType = "arraybuffer";
    oReq.onload = () => resolve(oReq.response);
    oReq.onerror = reject;
    oReq.send();
  });
  var data = new Uint8Array(arrayBuffer);
  var arr = new Array();
  for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
  var bstr = arr.join("");
  var workbook = XLSX.read(bstr, {
    type: "binary"
  });
  return workbook;
}
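A side note (my addition): the byte-by-byte String.fromCharCode loop can be skipped entirely, because XLSX.read also accepts a typed array directly:
async function Get_XLSX_As_Workbook_From_URL(url) {
  const arrayBuffer = await new Promise((resolve, reject) => {
    var oReq = new XMLHttpRequest();
    oReq.open("GET", url, true);
    oReq.responseType = "arraybuffer";
    oReq.onload = () => resolve(oReq.response);
    oReq.onerror = reject;
    oReq.send();
  });
  // XLSX.read accepts a Uint8Array with type "array"; no binary string needed
  return XLSX.read(new Uint8Array(arrayBuffer), { type: "array" });
}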

Apps script write to Big Query unknown error

This is supposed to read in a CSV and then write it to BigQuery. When it runs, however, nothing is written and there are no errors logged. I read that I need to write a CSV and then turn it into an octet stream. I am not sure whether or not this is compatible with Google BigQuery.
function test() {
  try {
    var tableReference = BigQuery.newTableReference();
    tableReference.setProjectId(PROJECT_ID);
    tableReference.setDatasetId(datasetId);
    tableReference.setTableId(tableId);
    var schema = "CUSTOMER:string, CLASSNUM:integer, CLASSDESC:string, CSR:string, CSR2:string, INSURANCE:string, REFERRALGENERAL:string, REFERRALSPECIFIC:string, NOTES:string, INMIN:integer, INHR:integer, OUTMIN:integer, OUTHR:integer, WAITMIN:integer, WAITHR:integer, DATETIMESTAMP:float, DATEYR:integer, DATEMONTH:integer, DATEDAY:integer";
    var load = BigQuery.newJobConfigurationLoad();
    load.setDestinationTable(tableReference);
    load.setSourceUris(URIs);
    load.setSourceFormat('NEWLINE_DELIMITED_JSON');
    load.setSchema(schema);
    load.setMaxBadRecords(0);
    load.setWriteDisposition('WRITE_TRUNCATE');
    var configuration = BigQuery.newJobConfiguration();
    configuration.setLoad(load);
    var newJob = BigQuery.newJob();
    newJob.setConfiguration(configuration);
    var loadr = DriveApp.getFilesByName("test.csv");
    var x = loadr.next().getBlob();
    Logger.log(x.getDataAsString());
    var d = DriveApp.getFilesByName("test.csv");
    var id = d.next().getId();
    Logger.log(id);
    var data = DocsList.getFileById(id).getBlob().getDataAsString();
    var mediaData = Utilities.newBlob(data, 'application/octet-stream');
    BigQuery.Jobs.insert(newJob, PROJECT_ID, mediaData);
  }
  catch (error) {
    Logger.log("A" + error.message);
  }
}
Your sourceFormat is wrong for CSV files:
The format of the data files. For CSV files, specify "CSV". For
datastore backups, specify "DATASTORE_BACKUP". For newline-delimited
JSON, specify "NEWLINE_DELIMITED_JSON". The default value is CSV.
https://developers.google.com/bigquery/docs/reference/v2/jobs#configuration.load.sourceUris
On the other hand, I think you don't need load.setSourceUris(URIs) at all, since you are trying to load from a local file and not from Google Cloud Storage. Check this Python example: https://developers.google.com/bigquery/loading-data-into-bigquery
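Putting both points together, a hedged sketch of the corrected load configuration (only these lines change; the rest of the original function stays as-is):
var load = BigQuery.newJobConfigurationLoad();
load.setDestinationTable(tableReference);
// No setSourceUris: the data is uploaded as the media body of Jobs.insert
load.setSourceFormat('CSV'); // the file really is CSV, not newline-delimited JSON
load.setSchema(schema);
load.setMaxBadRecords(0);
load.setWriteDisposition('WRITE_TRUNCATE');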
