Using ExcelJS, how to read data from excel file using URL? - javascript

I have an Excel file uploaded in the cloud. Using Node.js, when the front end passes a URL, I need to read the data from that Excel file. Below is my code:
// BUG (the subject of this question): workbook.xlsx.load() expects a Buffer
// containing .xlsx bytes, but a URL *string* is passed instead. The zip
// parser then fails with "Can't find end of central directory".
var workbook = new ExcelJS.Workbook();
workbook.xlsx.load('https://file-examples.com/wp-content/uploads/2017/02/file_example_XLSX_10.xlsx')
.then(function() {
// NOTE(review): `sheet` is not defined in this snippet — presumably a sheet
// name or index supplied elsewhere; confirm against the full code.
var worksheet = workbook.getWorksheet(sheet);
worksheet.eachRow({ includeEmpty: true }, function(row, rowNumber) {
console.log("Row " + rowNumber + " = " + JSON.stringify(row.values));
});
});
But I'm getting this error,
Can't find end of central directory : is this a zip file ? If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html
Can someone help me to fix this?

Fixed using the below code.
const axios = require("axios");
const XLSX = require("xlsx");
const jsontoxml = require("jsontoxml");

/**
 * Download an .xlsx workbook from `url` and print its contents as JSON and XML.
 * @param {string} url - HTTP(S) location of the workbook.
 */
async function testAxiosXlsx(url) {
  // Ask for the raw bytes: XLSX.read() needs binary data, not decoded text.
  const response = await axios({ url, responseType: "arraybuffer" });
  const workbook = XLSX.read(response.data);

  // One { sheetName, data } record per sheet, rows flattened to JSON objects.
  const worksheets = workbook.SheetNames.map((sheetName) => ({
    sheetName,
    data: XLSX.utils.sheet_to_json(workbook.Sheets[sheetName]),
  }));

  console.log("json:\n", JSON.stringify(worksheets), "\n\n");
  console.log("xml:\n", jsontoxml(worksheets, {}));
}

testAxiosXlsx("https://file-examples.com/wp-content/uploads/2017/02/file_example_XLSX_10.xlsx");

Look at the documentation.
The load method expects you to pass a Buffer containing Excel data.
You are passing it a string, and that string contains a URL, not Excel data.
You need to fetch the data and convert it to a buffer.

Related

XLSX to CSV file convert for API call

I'm trying to convert an .xlsx file to a .csv file before sending it to the API (because the API accepts only .csv files).
I am using the xlsx package, and I have a function that does the conversion for me. The problem is that this function makes the user download the resulting file, which I don't want — I just want the converted data kept in memory (e.g. in an object) so I can use it only for the API call, without the user noticing the conversion.
Here is code:
file.arrayBuffer().then((rawBuffer) => {
  // Parse the uploaded workbook from its raw bytes.
  const bytes = new Uint8Array(rawBuffer);
  const workbook = XLSX.read(bytes, { type: "array" });

  // Grab the first sheet and flatten it into an array of row objects.
  const firstSheetName = workbook.SheetNames[0];
  const firstSheet = workbook.Sheets[firstSheetName];
  const jsonData = XLSX.utils.sheet_to_json(firstSheet, { raw: false, defval: null });

  // Name the output after the uploaded file, minus its extension.
  let fileName;
  if (file) {
    fileName = file?.name.substring(0, file?.name?.indexOf("."));
  }

  // Rebuild a single-sheet workbook from the JSON rows and save it as CSV.
  const outSheet = XLSX.utils.json_to_sheet(jsonData);
  const outWorkbook = XLSX.utils.book_new();
  XLSX.utils.book_append_sheet(outWorkbook, outSheet, "csv_sheet");
  getRandomRows(jsonData);
  // NOTE: writeFile triggers a browser download — the behavior the asker wants to avoid.
  XLSX.writeFile(outWorkbook, fileName + ".csv");
});
Was wondering if there are other options too.
Based on the README of the xlsx package, try using writeFileAsync, or just write instead of writeFile, since writeFile forces the browser to start a download.
I fixed it this way :
// Convert the JSON rows to CSV text entirely in memory — no download is triggered.
const sheetFromData = XLSX.utils.json_to_sheet(data);
const dataFileCSV = XLSX.utils.sheet_to_csv(sheetFromData, {raw: false, defval: null});
// Prepend a UTF-8 BOM so spreadsheet apps detect the encoding correctly.
const csvBlob = new Blob(["\ufeff", dataFileCSV]);
const fileBlob = new File([csvBlob], "name");

Is there a way to unzip a file from a url request and parse its content in node?

I want to download a zip file from a url and parse its contents in node. I do not want to save the file on disk. The zip file is a directory of csv files that I want to process. How can I approach this? The only package that has an option for unzipping from a URL is unzipper but it does not work for me. Every other package lets you unzip a file on disk by providing the path to the file but not a url.
I am downloading the file like so:
const res = await this.get(test)
But what can I do now? There are packages like AdmZip that can extract zip files but need a path as a string to a file on disk. Is there a way I can pass/ stream my res object above to the below?
var AdmZip = require('adm-zip');
// Open an archive from disk and report information about every entry.
var zip = new AdmZip("./my_file.zip");
var zipEntries = zip.getEntries(); // an array of ZipEntry records
for (var i = 0; i < zipEntries.length; i++) {
  var zipEntry = zipEntries[i];
  console.log(zipEntry.toString()); // outputs zip entries information
  // Dump the text of one specific entry when we find it.
  if (zipEntry.entryName == "my_file.txt") {
    console.log(zipEntry.getData().toString('utf8'));
  }
}
Here's a simple example of downloading a .zip file and unzipping using adm-zip. As #anonymouze points out, you can pass a buffer to the AdmZip constructor.
const axios = require("axios");
const AdmZip = require('adm-zip');

/**
 * Fetch `url` and resolve with the response body as a Buffer.
 * @param {string} url
 * @returns {Promise<Buffer>}
 */
async function get(url) {
  const { data } = await axios({
    method: 'GET',
    url: url,
    responseType: "arraybuffer", // raw bytes, not decoded text
  });
  return data;
}

/**
 * Download a .zip archive from `url` and log each entry's name, size, and text.
 * @param {string} url
 */
async function getAndUnZip(url) {
  const zipFileBuffer = await get(url);
  const archive = new AdmZip(zipFileBuffer); // AdmZip accepts an in-memory buffer
  for (const entry of archive.getEntries()) {
    const contents = entry.getData();
    console.log("File: " + entry.entryName + ", length (bytes): " + contents.length + ", contents: " + contents.toString("utf-8"));
  }
}

getAndUnZip('https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-zip-file.zip');
In this case I'm simply using axios to download a zip file buffer, then parsing it with AdmZip.
Each entry's data can be accessed with entry.getData(), which will return a buffer.
In this case we'll see an output something like this:
File: sample.txt, length (bytes): 282, contents: I would love to try or hear the sample audio your app can produce. I do...
Here's another example, this time using node-fetch:
const fetch = require('node-fetch');
const AdmZip = require('adm-zip');

// Resolve with the body of `url` as a Buffer.
async function get(url) {
  const res = await fetch(url);
  return res.buffer();
}

// Download a .zip from `url` and log each entry's name, size, and text contents.
async function getAndUnZip(url) {
  const zipFileBuffer = await get(url);
  const archive = new AdmZip(zipFileBuffer);
  for (const entry of archive.getEntries()) {
    const contents = entry.getData();
    console.log("File: " + entry.entryName + ", length (bytes): " + contents.length + ", contents: " + contents.toString("utf-8"));
  }
}

getAndUnZip('https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-zip-file.zip');

how to read a Blob in D3.js

I need to download a blob, then read its values and create D3 charts. So far I have managed to download the blob file from an Azure container, but I don't know how to extract the values from it and pass them to D3.js. As far as I know, D3 can read CSV files, so I tried converting the blob to a CSV file, but that didn't work. Is there a way to do this?
thanks !
const AzureService = {
  // Downloads the configured blob over a SAS URL and resolves with its body as a Blob.
  async execute() {
    const accountName = STORAGE_ACCOUNT_NAME;
    const blobName = "xxxxxxxxxxxxxxxxxxxxx";
    const sasString = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
    const containerName = "xxxxxxxxxxxxxxxxxxxxxxxxxx";

    // Anonymous credential: the SAS token embedded in the URL carries the authorization.
    const pipeline = azblob.StorageURL.newPipeline(new azblob.AnonymousCredential());
    const container = new azblob.ContainerURL(
      `https://${accountName}.blob.core.windows.net/${containerName}?${sasString}`,
      pipeline
    );

    const blob = azblob.BlobURL.fromContainerURL(container, blobName);
    const response = await blob.download(azblob.Aborter.none, 0);
    return await response.blobBody;
  }
};
getting the blob in the service
componentDidMount() {
this.file = AzureService.execute().then(data => {
this.readCSVs(data);
});
}
There I'm trying to convert the blob and use D3.csv to read
readCSVs(data) {
let contentType = "text/csv";
let csvFile = new Blob([data], { type: contentType });
let svg, productName, filtered;
d3.csv(csvFile).then(data => {
svg = d3
.select("#users_bar2")
.style("width", 500)
.style("height", 500);
productName = "CLeg";
filtered = data.filter(x => x.productName == productName);
this.histogram(svg, filtered, 500, 500, "hist", productName);
});
}
Based on the documentation available here, you can use d3.csv file to read directly from the blob (no need for a separate function to read from Azure Blob Storage).
d3.csv(`https://${accountName}.blob.core.windows.net/${containerName}?${sasString}`, row, callback)
where
row: parser function to parse a record (row) into JSON format
callback: callback function

NodeJS + ldapsj-client: problem saving thumbnailPhoto

Using the ldapsj-client module, I'm trying to save the thumbnailPhoto into a file
// Binds to Active Directory via ldapsj-client and logs the raw thumbnailPhoto
// attribute of the first search result. As the question notes, the value
// prints as raw binary (e.g. '����JFIF…') rather than a usable image.
const auth = async () => {
const client = new LdapClient({ url: 'myaddomain' })
// NOTE(review): credentials here are placeholders; never hard-code real ones.
await client.bind('someemail#domain.com.br', 'passwaord')
const opts = {
// NOTE(review): `credential` is literal text inside the backticks, not
// interpolated — likely `(sAMAccountName=${credential})` was intended; confirm.
filter: `(sAMAccountName=credential)`,
scope: "sub"
}
// `myBaseDN` is defined outside this snippet.
const s = await client.search(myBaseDN, opts)
console.log('thumbnailPhoto', s[0].thumbnailPhoto)
}
The console.log() outputs something like '����JFIF��C...'
I cannot figure out how to save this binary data into a file. When I try several approaches, as explained here, it does not work. It seems the data from AD is not in the same "format".
I tried to convert it into a Buffer and then, to base64
// Attempted fix: wrap the attribute in a Buffer and build a base64 data: URI.
const buffer = Buffer.from(s[0].thumbnailPhoto, 'binary')
// NOTE(review): if the LDAP client already decoded the attribute as a UTF-8
// string, the bytes were corrupted before this point — which would explain
// why the resulting base64 is invalid. Fetch the attribute as a raw buffer.
var src = "data:image/png;base64," + Buffer.from(s[0].thumbnailPhoto).toString('base64')
But the output is not a valid base64.

Validate excel file before uploading using javascript

I need to write a functionality to upload an excel file (size: 1 GB).
I would like to validate the column name of excel file. If that matches the predefined column names then only upload the file else show error.
My question is: Is there a way to validate(need to check only the column name) this huge excel file using javascript or jquery ?
ps: at backend I am using Spring.
Please follow the concept below; using this approach you will be able to get the results you need.
// Reads the user-selected Excel file in the browser, extracts the header row
// of the first sheet, and passes it to setColDefs (to validate / set up
// column definitions before upload).
// NOTE(review): this snippet is truncated — the reader.onload handler and the
// arrow function are never closed, and the final reader.readAsBinaryString(file)
// call is missing.
const importExcel = (e) => {
const file = e.target.files[0]
const reader = new FileReader()
reader.onload = (event) => {
//parse data
const bstr = event.target.result
const workBook = XLSX.read(bstr, { type: "binary" })
//get first sheet
const workSheetName = workBook.SheetNames[0]
const workSheet = workBook.Sheets[workSheetName]
//convert to array
const fileData = XLSX.utils.sheet_to_json(workSheet, { header: 1 })
// console.log(fileData)
// First row = column headers; reshape for the grid's column definitions.
const headers = fileData[0]
const heads = headers.map(head => ({ title: head, field: head }))
setColDefs(heads)

Categories

Resources