XLSX to CSV file convert for API call - javascript

I'm trying to convert an .xlsx file to a .csv file before sending it to an API (because the API accepts only .csv files).
I am using the xlsx package, and I have a function that does the conversion for me, but the problem is that this function makes the user download the file. I don't want that; I just want to keep the result in an object so I can use it only for the API call (and not let the user know it was converted).
Here is the code:
file.arrayBuffer().then(res => {
    let data = new Uint8Array(res)
    let workbook = XLSX.read(data, {type: "array"})
    let first_sheet_name = workbook.SheetNames[0]
    let worksheet = workbook.Sheets[first_sheet_name]
    let jsonData = XLSX.utils.sheet_to_json(worksheet, {raw: false, defval: null})
    let fileName;
    if (file) {
        fileName = file?.name.substring(0, file?.name?.indexOf("."))
    }
    let new_worksheet = XLSX.utils.json_to_sheet(jsonData);
    let new_workbook = XLSX.utils.book_new();
    XLSX.utils.book_append_sheet(new_workbook, new_worksheet, "csv_sheet")
    getRandomRows(jsonData)
    XLSX.writeFile(new_workbook, fileName + ".csv")
})
I was wondering if there are other options too.

Based on the README for xlsx, try using writeFileAsync or just write instead of writeFile, since writeFile forces the browser to start a download.
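For instance, a minimal sketch (assuming the same new_workbook built in your code) keeps the CSV in memory as a string rather than triggering a download:
// Sketch: serialize the workbook to a CSV string in memory instead of writing a file.
const csvString = XLSX.write(new_workbook, { bookType: "csv", type: "string" });
// csvString can then be wrapped in a Blob/File and sent to the API.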

I fixed it this way:
// convert the JSON rows back to a sheet, then serialize that sheet to CSV text
let dataToSheet = XLSX.utils.json_to_sheet(data)
const dataFileCSV = XLSX.utils.sheet_to_csv(dataToSheet, {raw: false, defval: null});
// wrap the CSV text (with a BOM so Excel detects UTF-8) in a Blob, then a File
let blob = new Blob(["\ufeff", dataFileCSV]);
let fileBlob = new File([blob], "name");
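From there, one way to send the in-memory file to the API without the user noticing is a FormData POST; a rough sketch (the endpoint URL and field name are placeholders, not from the original post):
// Sketch: POST the converted CSV to the API. "/upload" and "file" are placeholders.
const formData = new FormData();
formData.append("file", fileBlob, "converted.csv");
fetch("/upload", { method: "POST", body: formData })
    .then(res => res.json())
    .then(result => console.log("API response:", result));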

Related

Using ExcelJS, how to read data from excel file using URL?

I have an Excel file uploaded in the cloud. Using Node.js, when the front end passes the URL, I need to read the data from the Excel file. Below is my code:
var workbook = new ExcelJS.Workbook();
workbook.xlsx.load('https://file-examples.com/wp-content/uploads/2017/02/file_example_XLSX_10.xlsx')
    .then(function() {
        var worksheet = workbook.getWorksheet(sheet);
        worksheet.eachRow({ includeEmpty: true }, function(row, rowNumber) {
            console.log("Row " + rowNumber + " = " + JSON.stringify(row.values));
        });
    });
But I'm getting this error:
Can't find end of central directory : is this a zip file ? If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html
Can someone help me to fix this?
Fixed using the below code.
const axios = require("axios");
const XLSX = require("xlsx");
const jsontoxml = require("jsontoxml");
async function testAxiosXlsx(url) {
    const options = {
        url,
        responseType: "arraybuffer"
    }
    let axiosResponse = await axios(options);
    const workbook = XLSX.read(axiosResponse.data);
    let worksheets = workbook.SheetNames.map(sheetName => {
        return { sheetName, data: XLSX.utils.sheet_to_json(workbook.Sheets[sheetName]) };
    });
    console.log("json:\n", JSON.stringify(worksheets), "\n\n");
    console.log("xml:\n", jsontoxml(worksheets, {}));
}
testAxiosXlsx("https://file-examples.com/wp-content/uploads/2017/02/file_example_XLSX_10.xlsx");
Look at the documentation.
The load method expects you to pass a Buffer containing Excel data.
You are passing it a string, and that string contains a URL, not Excel data.
You need to fetch the data and convert it to a buffer.
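For completeness, a rough sketch of that fix using ExcelJS itself (assuming Node.js with axios available, and the same sample URL) could look like this:
const axios = require("axios");
const ExcelJS = require("exceljs");

// Sketch: fetch the remote .xlsx into a Buffer, then hand that Buffer to ExcelJS.
async function readRemoteXlsx(url) {
    const response = await axios.get(url, { responseType: "arraybuffer" });
    const workbook = new ExcelJS.Workbook();
    await workbook.xlsx.load(Buffer.from(response.data)); // load expects Excel data, not a URL
    const worksheet = workbook.worksheets[0]; // first sheet
    worksheet.eachRow({ includeEmpty: true }, function(row, rowNumber) {
        console.log("Row " + rowNumber + " = " + JSON.stringify(row.values));
    });
}

readRemoteXlsx("https://file-examples.com/wp-content/uploads/2017/02/file_example_XLSX_10.xlsx");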

Fail to upload local CSV file with fetch() and fail to execute FileReader()

I'm trying to manipulate a local CSV file with JavaScript. My purpose is to display data from my CSV on my website, much like consuming an external API that returns JSON.
const csvLocalFile = "http://XXX/.../file.csv";

const openFile = async () => {
    const csv = await fetch(csvLocalFile).then();
    let reader = new FileReader();
    reader.onload = function () {
        let text = reader.result;
        filecontent = text.replace("", "");
    };
    reader.readAsText(csv.files[0]);
};
openFile();
Chrome display this error :
TypeError: Cannot read properties of undefined (reading '0')
When I delete "[0]" from "reader.readAsText(csv.files[0])", I have this message error :
TypeError: Failed to execute 'readAsText' on 'FileReader': parameter 1 is not of type 'Blob'.
An empty .then() isn't the problem; it turns out it works fine without a callback, but you should remove it regardless.
The FileReader can't read Response objects, only Files & Blobs...
You say that you're trying to read a local file, yet you use fetch to get a remote file, so what is it you're really trying to do? It's unclear how to help you...
A CSV isn't valid JSON data, so you can't use .then((res) => res.JSON()).
Besides, res.JSON() is wrong; it should be all lowercase: res.json().
The FileReader is considered legacy, so you no longer need it... use await blob.text() instead.
Here are two examples: the first reads a remote file using fetch:
// Simulate getting a file from eg a file input or drag and drop
const file = new File(['id,name\n10,bob'], 'file.csv', { type: 'text/csv' })
// Simulate a remote location to get the csv from
const url = URL.createObjectURL(file)
const csvLocalFile = url // http://XXX/.../file.csv
const openFile = async () => {
    const response = await fetch(csvLocalFile)
    const text = await response.text()
    console.log(text)
}
openFile()
...and the second reads an actual local file selected via a file input:
const fileInput = document.querySelector('#fileInput')
fileInput.onchange = async () => {
    const file = fileInput.files[0]
    const text = await file.text()
    console.log(text)
}
// ignore code below this line...
// Create a dummy file that we can use to change the file input with...
const dummyFile = new File(['id,name\n10,bob'], 'file.csv', { type: 'text/csv' })
// Used for creating a new FileList in a round-about way
const b = new ClipboardEvent('').clipboardData || new DataTransfer()
b.items.add(dummyFile)
// simulate picking a file
fileInput.files = b.files
fileInput.onchange()
<input type="file" id="fileInput">

Parsing Image from Excel Sheet in JavaScript

The Excel file consists of images and records based on them. I am trying to parse it into a JSON object, but the image field is not included. I am using the xlsx library for parsing. Thanks for any help...
if (inputElement.files[0].name.match(/\.(xlsx)$/i) || inputElement.files[0].name.match(/\.(xls)$/i)) {
    let fileReader = new FileReader();
    fileReader.readAsBinaryString(inputElement.files[0]);
    fileReader.onload = (event) => {
        let data = event.target.result;
        let workbook = XLSX.read(data, {type: "binary"});
        workbook.SheetNames.forEach(sheet => {
            let rowObject = XLSX.utils.sheet_to_row_object_array(workbook.Sheets[sheet]);
            console.log(rowObject);
        });
    }
}

how to read a Blob in D3.js

I need to download a blob, then read the values and create D3 charts. So far I have managed to download the blob file from the Azure container, but I don't know how to get the values from it and feed them to D3. As far as I know, D3 can read CSV files, so I tried to convert the blob to a CSV file, but that didn't work. Is there a way to do this?
Thanks!
const AzureService = {
    execute: async function() {
        const accountName = STORAGE_ACCOUNT_NAME;
        const blobName = "xxxxxxxxxxxxxxxxxxxxx";
        const sasString = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
        const containerName = "xxxxxxxxxxxxxxxxxxxxxxxxxx";
        const containerURL = new azblob.ContainerURL(
            `https://${accountName}.blob.core.windows.net/${containerName}?${sasString}`,
            azblob.StorageURL.newPipeline(new azblob.AnonymousCredential())
        );
        const blobURL = azblob.BlobURL.fromContainerURL(containerURL, blobName);
        const downloadBlobResponse = await blobURL.download(azblob.Aborter.none, 0);
        return await downloadBlobResponse.blobBody;
    }
};
Getting the blob in the service:
componentDidMount() {
    this.file = AzureService.execute().then(data => {
        this.readCSVs(data);
    });
}
Here I'm trying to convert the blob and use d3.csv to read it:
readCSVs(data) {
    let contentType = "text/csv";
    let csvFile = new Blob([data], { type: contentType });
    let svg, productName, filtered;
    d3.csv(csvFile).then(data => {
        svg = d3
            .select("#users_bar2")
            .style("width", 500)
            .style("height", 500);
        productName = "CLeg";
        filtered = data.filter(x => x.productName == productName);
        this.histogram(svg, filtered, 500, 500, "hist", productName);
    });
}
Based on the documentation available here, you can use d3.csv to read directly from the blob URL (no need for a separate function to read from Azure Blob Storage):
d3.csv(`https://${accountName}.blob.core.windows.net/${containerName}/${blobName}?${sasString}`, row, callback)
where
row: an optional parser function that converts each record (row) into an object
callback: a callback function invoked with the parsed rows
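Alternatively, if you keep the AzureService download from the question, a minimal sketch (assuming D3 v5+ where d3.csvParse is available and blobBody resolves to a Blob) parses the downloaded blob directly instead of re-fetching by URL:
// Sketch: parse the already-downloaded Azure blob with d3.csvParse (assumes D3 v5+).
async readCSVs() {
    const blob = await AzureService.execute();
    const text = await blob.text(); // read the Blob as a string
    const data = d3.csvParse(text); // parse the CSV text into an array of row objects
    const productName = "CLeg";
    const filtered = data.filter(x => x.productName === productName);
    const svg = d3.select("#users_bar2").style("width", 500).style("height", 500);
    this.histogram(svg, filtered, 500, 500, "hist", productName);
}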

Validate excel file before uploading using javascript

I need to write functionality to upload an Excel file (size: 1 GB).
I would like to validate the column names of the Excel file: only if they match the predefined column names should the file be uploaded; otherwise show an error.
My question is: is there a way to validate (I need to check only the column names) this huge Excel file using JavaScript or jQuery?
PS: on the backend I am using Spring.
Please follow the concept below; using this approach you will be able to get the results you need:
const importExcel = (e) => {
    const file = e.target.files[0]
    const reader = new FileReader()
    reader.onload = (event) => {
        // parse data
        const bstr = event.target.result
        const workBook = XLSX.read(bstr, { type: "binary" })
        // get first sheet
        const workSheetName = workBook.SheetNames[0]
        const workSheet = workBook.Sheets[workSheetName]
        // convert to an array of arrays; the first row holds the column names
        const fileData = XLSX.utils.sheet_to_json(workSheet, { header: 1 })
        // console.log(fileData)
        const headers = fileData[0]
        const heads = headers.map(head => ({ title: head, field: head }))
        setColDefs(heads)
    }
    reader.readAsBinaryString(file)
}
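If the goal is to block the upload when the header row does not match, a rough sketch of that check (expectedColumns, uploadFile and showError are hypothetical placeholders, not part of the original answer) could run right after the headers are extracted:
// Sketch: compare the parsed header row against the predefined column names.
// expectedColumns, uploadFile and showError are hypothetical placeholders.
const expectedColumns = ["id", "name", "email"];
const isValid =
    headers.length === expectedColumns.length &&
    expectedColumns.every((col, i) => headers[i] === col);
if (isValid) {
    uploadFile(file); // e.g. POST the original file to the Spring backend
} else {
    showError("Column names do not match the expected template");
}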
