I need to download a blob, then read the values and create D3 charts. So far I've managed to download the blob file from the Azure container, but I don't know how to get the values from it and pass them to d3.js. As far as I know, D3 can read CSV files, so I tried to convert the blob to a CSV file, but it didn't work. Is there a way to do this?
Thanks!
const AzureService = {
  execute: async function() {
    const accountName = STORAGE_ACCOUNT_NAME;
    const blobName = "xxxxxxxxxxxxxxxxxxxxx";
    const sasString = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
    const containerName = "xxxxxxxxxxxxxxxxxxxxxxxxxx";
    const containerURL = new azblob.ContainerURL(
      `https://${accountName}.blob.core.windows.net/${containerName}?${sasString}`,
      azblob.StorageURL.newPipeline(new azblob.AnonymousCredential())
    );
    const blobURL = azblob.BlobURL.fromContainerURL(containerURL, blobName);
    const downloadBlobResponse = await blobURL.download(azblob.Aborter.none, 0);
    // In browsers, the downloaded content is exposed as a Blob via blobBody
    return await downloadBlobResponse.blobBody;
  }
};
Getting the blob from the service:
componentDidMount() {
  this.file = AzureService.execute().then(data => {
    this.readCSVs(data);
  });
}
Here I'm trying to convert the blob and use d3.csv to read it:
readCSVs(data) {
  let contentType = "text/csv";
  let csvFile = new Blob([data], { type: contentType });
  let svg, productName, filtered;
  d3.csv(csvFile).then(data => {
    svg = d3
      .select("#users_bar2")
      .style("width", 500)
      .style("height", 500);
    productName = "CLeg";
    filtered = data.filter(x => x.productName == productName);
    this.histogram(svg, filtered, 500, 500, "hist", productName);
  });
}
Based on the documentation available here, you can use d3.csv to read directly from the blob URL (no need for a separate function to read from Azure Blob Storage):
d3.csv(`https://${accountName}.blob.core.windows.net/${containerName}/${blobName}?${sasString}`, row, callback)
where
row: a parser function that parses each record (row) into an object
callback: a callback function invoked with the parsed rows
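For instance, a minimal sketch (assuming d3 v5+, where d3.csv returns a promise, and a SAS token with read permission on the blob; accountName, containerName, blobName, and sasString are the same values as in the question's AzureService):
const blobUrl = `https://${accountName}.blob.core.windows.net/${containerName}/${blobName}?${sasString}`;
d3.csv(blobUrl).then(data => {
  const productName = "CLeg";
  const filtered = data.filter(x => x.productName === productName);
  // hand the parsed rows to the charting code, e.g. this.histogram(...)
});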
Related
I have an Excel file uploaded to the cloud. Using Node.js, when the front end passes a URL, I need to read data from the Excel file. Below is my code:
var workbook = new ExcelJS.Workbook();
workbook.xlsx.load('https://file-examples.com/wp-content/uploads/2017/02/file_example_XLSX_10.xlsx')
  .then(function() {
    var worksheet = workbook.getWorksheet(sheet);
    worksheet.eachRow({ includeEmpty: true }, function(row, rowNumber) {
      console.log("Row " + rowNumber + " = " + JSON.stringify(row.values));
    });
  });
But I'm getting this error,
Can't find end of central directory : is this a zip file ? If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html
Can someone help me to fix this?
Fixed using the below code.
const axios = require("axios");
const XLSX = require("xlsx");
const jsontoxml = require("jsontoxml");

async function testAxiosXlsx(url) {
  // Request the file as raw bytes, not as a decoded string
  const options = {
    url,
    responseType: "arraybuffer"
  };
  let axiosResponse = await axios(options);
  const workbook = XLSX.read(axiosResponse.data);
  let worksheets = workbook.SheetNames.map(sheetName => {
    return { sheetName, data: XLSX.utils.sheet_to_json(workbook.Sheets[sheetName]) };
  });
  console.log("json:\n", JSON.stringify(worksheets), "\n\n");
  console.log("xml:\n", jsontoxml(worksheets, {}));
}

testAxiosXlsx("https://file-examples.com/wp-content/uploads/2017/02/file_example_XLSX_10.xlsx");
Look at the documentation.
The load method expects you to pass a Buffer containing Excel data.
You are passing it a string, and that string contains a URL, not Excel data.
You need to fetch the data and convert it to a buffer.
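For example, a minimal sketch along those lines using axios to fetch the bytes (the URL is the one from the question; exceljs's workbook.xlsx.load() accepts a Buffer/ArrayBuffer):
const axios = require("axios");
const ExcelJS = require("exceljs");

async function readRemoteXlsx(url) {
  // Fetch the raw bytes rather than a decoded string
  const response = await axios({ url, responseType: "arraybuffer" });
  const workbook = new ExcelJS.Workbook();
  // load() expects Excel data (Buffer/ArrayBuffer), not a URL string
  await workbook.xlsx.load(response.data);
  const worksheet = workbook.worksheets[0]; // first sheet
  worksheet.eachRow({ includeEmpty: true }, function(row, rowNumber) {
    console.log("Row " + rowNumber + " = " + JSON.stringify(row.values));
  });
}

readRemoteXlsx("https://file-examples.com/wp-content/uploads/2017/02/file_example_XLSX_10.xlsx");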
I'm trying to manipulate a local CSV file with JavaScript. My goal is to display data from my CSV on my website, as if I were consuming an external API in JSON format, for example.
const csvLocalFile =
  "http://XXX/.../file.csv";

const openFile = async () => {
  const csv = await fetch(csvLocalFile).then();
  let reader = new FileReader();
  reader.onload = function () {
    let text = reader.result;
    filecontent = text.replace("", "");
  };
  reader.readAsText(csv.files[0]);
};

openFile();
Chrome displays this error:
TypeError: Cannot read properties of undefined (reading '0')
When I delete "[0]" from "reader.readAsText(csv.files[0])", I get this error message:
TypeError: Failed to execute 'readAsText' on 'FileReader': parameter 1 is not of type 'Blob'.
An empty .then() isn't the problem; it turns out fetch works fine without a callback there, but you should remove it regardless.
The FileReader can't read Response objects, only Files & Blobs...
You say that you're trying to read a local file, yet you use fetch to get a remote file, so what is it you're really trying to do? It's unclear how to help you...
A CSV isn't valid JSON data, so you can't use .then((res) => res.JSON()).
Besides, res.JSON() is wrong; it should be all lowercase: res.json().
The FileReader is considered legacy, so you no longer need it... use await blob.text() instead.
Here are two examples. The first reads a remote file using fetch:
// Simulate getting a file from e.g. a file input or drag and drop
const file = new File(['id,name\n10,bob'], 'file.csv', { type: 'text/csv' })
// Simulate a remote location to get the csv from
const url = URL.createObjectURL(file)
const csvLocalFile = url // http://XXX/.../file.csv

const openFile = async () => {
  const response = await fetch(csvLocalFile)
  const text = await response.text()
  console.log(text)
}

openFile()
...and here is another with an actual local file selected from a file input:
const fileInput = document.querySelector('#fileInput')

fileInput.onchange = async () => {
  const file = fileInput.files[0]
  const text = await file.text()
  console.log(text)
}

// ignore code below this line...
// Create a dummy file that we can use to change the file input with...
const dummyFile = new File(['id,name\n10,bob'], 'file.csv', { type: 'text/csv' })
// Used for creating a new FileList in a round-about way
const b = new ClipboardEvent('').clipboardData || new DataTransfer()
b.items.add(dummyFile)
// simulate picking a file
fileInput.files = b.files
fileInput.onchange()

<input type="file" id="fileInput">
I'm trying to convert an .xlsx file to a .csv file before I send it to the API (because it accepts only .csv files).
I am using the xlsx package, and I have this function that converts the file for me, but the problem is that it makes the user download the file. I don't want that; I just want it kept in an object of some kind so I can use it only for the API (without the user knowing it was converted).
Here is the code:
file.arrayBuffer().then(res => {
  let data = new Uint8Array(res)
  let workbook = XLSX.read(data, { type: "array" })
  let first_sheet_name = workbook.SheetNames[0]
  let worksheet = workbook.Sheets[first_sheet_name]
  let jsonData = XLSX.utils.sheet_to_json(worksheet, { raw: false, defval: null })
  let fileName;
  if (file) {
    fileName = file?.name.substring(0, file?.name?.indexOf("."))
  }
  let new_worksheet = XLSX.utils.json_to_sheet(jsonData);
  let new_workbook = XLSX.utils.book_new();
  XLSX.utils.book_append_sheet(new_workbook, new_worksheet, "csv_sheet")
  getRandomRows(jsonData)
  // writeFile is what triggers the browser download
  XLSX.writeFile(new_workbook, fileName + ".csv")
})
I was wondering if there are other options too.
Based on the README from xlsx, try using writeFileAsync, or just write instead of writeFile, as writeFile forces the browser to start a download.
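For example, a sketch of the write variant (reusing new_workbook from the question), which returns the serialized CSV in memory instead of triggering a download:
// Serialize the workbook to a CSV string in memory; nothing is downloaded
const csvString = XLSX.write(new_workbook, { bookType: "csv", type: "string" });
// csvString can now be wrapped in a Blob/File and sent to the API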
I fixed it this way:
let dataToSheet = XLSX.utils.json_to_sheet(data)
const dataFileCSV = XLSX.utils.sheet_to_csv(dataToSheet, { raw: false, defval: null });
// Prepend a BOM so Excel opens the CSV with the right encoding
let blob = new Blob(["\ufeff", dataFileCSV]);
let fileBlob = new File([blob], "name");
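Since the goal is to send the converted file to the API without the user noticing, the resulting File can then be posted with FormData; a sketch, where the endpoint and field name are placeholders:
const formData = new FormData();
formData.append("file", fileBlob, "converted.csv"); // field name depends on your API
fetch("/api/upload", { method: "POST", body: formData }); // hypothetical endpoint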
I am working on a ReactJS/TypeScript application. I am trying to download some files from Azure Storage v2. Below is the sample path I am supposed to download files from. In this path, enrichment is the container name and the rest are folders. I am trying to download the last modified file from the reportdocument folder.
enrichment/report/SAR-1234-56/reportdocument/file1.docs
I tried something like the below.
@action
public async reportDownload(sarNumber: string) {
  let storage = globals.getGlobals('StorageAccount03');
  console.log(storage);
  let containerName = globals.getGlobals('StorageAccount03ContainerName');
  let marker = undefined;
  let allUploadPromise: Array<Promise<unknown>> = [];
  const config = {
    path: `/Storage/getsastoken/?storageName=${storage}&containerName=${containerName}`,
    method: "GET",
    success: (url: any) => {
      const containerURL: ContainerURL = new ContainerURL(
        url,
        StorageURL.newPipeline(new AnonymousCredential()));
      const listBlobsResponse = containerURL.listBlobFlatSegment(
        Aborter.none,
        marker,
      );
    }
  };
  await handleRequest(config);
}
From here I am struggling to download the latest modified file from the above path.
Can someone help me fix this? Any help would be greatly appreciated. Thank you!
It's better to use the @azure/storage-blob library, and then the code would be something like below, instead of directly trying to call the blob REST API as you were doing in your code, which is unnecessarily reinventing the wheel; the library already does that for you. Refer to this for details.
const { BlobServiceClient } = require("@azure/storage-blob");

const account = "<account name>";
const sas = "<service Shared Access Signature Token>";
const containerName = "<container name>";
const blobName = "<blob name>";

const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net${sas}`);

async function download() {
  const containerClient = blobServiceClient.getContainerClient(containerName);
  const blobClient = containerClient.getBlobClient(blobName);

  // Get blob content from position 0 to the end
  // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody
  const downloadBlockBlobResponse = await blobClient.download();
  const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);
  console.log("Downloaded blob content", downloaded);

  // [Browsers only] A helper method used to convert a browser Blob into string.
  async function blobToString(blob) {
    const fileReader = new FileReader();
    return new Promise((resolve, reject) => {
      fileReader.onloadend = (ev) => {
        resolve(ev.target.result);
      };
      fileReader.onerror = reject;
      fileReader.readAsText(blob);
    });
  }
}
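The code above downloads a blob by a known name, but the question asks for the last modified file under the reportdocument folder. A sketch of one way to do that with the same library, listing blobs by prefix (taken from the question's path) and keeping the most recent lastModified; it reuses blobServiceClient and containerName from above:
async function downloadLatest() {
  const containerClient = blobServiceClient.getContainerClient(containerName);
  const prefix = "report/SAR-1234-56/reportdocument/"; // folder path inside the container
  let latest;
  // List all blobs under the prefix and keep the most recently modified one
  for await (const blob of containerClient.listBlobsFlat({ prefix })) {
    if (!latest || blob.properties.lastModified > latest.properties.lastModified) {
      latest = blob;
    }
  }
  if (!latest) return;
  const blobClient = containerClient.getBlobClient(latest.name);
  const downloadResponse = await blobClient.download();
  // In browsers, read the content via downloadResponse.blobBody as shown above
  console.log("Latest blob:", latest.name, latest.properties.lastModified);
}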
The SAS token expiry bothers me. You cannot rely on a static SAS token that expires soon, unless we can set a long expiry (a user-delegation SAS token is short-lived). Do we really have the capability to create the SAS token dynamically in a JavaScript runtime? I think it's only possible in a Node.js runtime.
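For what it's worth, generating the SAS dynamically is indeed a Node.js-side job, since it requires the account key; a sketch with @azure/storage-blob, where accountName/accountKey are placeholders:
const { StorageSharedKeyCredential, generateBlobSASQueryParameters, BlobSASPermissions } = require("@azure/storage-blob");

function createReadSas(accountName, accountKey, containerName, blobName) {
  const credential = new StorageSharedKeyCredential(accountName, accountKey);
  const sas = generateBlobSASQueryParameters({
    containerName,
    blobName,
    permissions: BlobSASPermissions.parse("r"),       // read-only
    expiresOn: new Date(Date.now() + 15 * 60 * 1000)  // short-lived: 15 minutes
  }, credential);
  return sas.toString();
}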
I need to write functionality to upload an Excel file (size: 1 GB).
I would like to validate the column names of the Excel file: if they match the predefined column names, only then upload the file, else show an error.
My question is: is there a way to validate (checking only the column names) this huge Excel file using JavaScript or jQuery?
PS: at the backend I am using Spring.
Please follow the below concept; using this approach you will be able to get your required results:
const importExcel = (e) => {
  const file = e.target.files[0]
  const reader = new FileReader()
  reader.onload = (event) => {
    // parse data
    const bstr = event.target.result
    const workBook = XLSX.read(bstr, { type: "binary" })
    // get first sheet
    const workSheetName = workBook.SheetNames[0]
    const workSheet = workBook.Sheets[workSheetName]
    // convert to an array of arrays; row 0 is the header row
    const fileData = XLSX.utils.sheet_to_json(workSheet, { header: 1 })
    const headers = fileData[0]
    const heads = headers.map(head => ({ title: head, field: head }))
    setColDefs(heads)
  }
  // kick off the read; onload above fires when it completes
  reader.readAsBinaryString(file)
}
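With the headers in hand, the validation itself is a simple comparison against the predefined names; a sketch to put inside the onload handler above, where EXPECTED_COLUMNS is a placeholder for your predefined column names:
const EXPECTED_COLUMNS = ["Name", "Email", "Phone"]; // hypothetical predefined names
// Accept the file only if every expected column is present in the header row
const isValid = EXPECTED_COLUMNS.every(col => headers.includes(col));
if (!isValid) {
  alert("Invalid file: column names do not match the expected template.");
} else {
  // proceed with the upload to the Spring backend
}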