How to export JSON data to xlsx using json_to_sheet - JavaScript

I am trying to fill an Excel file using the JSON below. I am getting the JSON data from an HTTP response, and I would like to use it to download an Excel file.
{
  "dynaModel": [
    {
      "map": {
        "UNIT/SUBUNIT": "sdasd",
        "SUBUNIT/ISU/GEO": "sasd",
        "REVENUEINR-RS": "₹87,sdd",
        "COSTINR-RS": "₹47,33",
        "GMINR-RSUSD-$": 46,
        "REVENUEINR-RS": "₹87,64,",
        "COSTINR-RS": "₹47,33,",
        "GMINR-RSUSD-$": 46
      }
    },
    {
      "map": {
        "UNIT/SUBUNIT": "fghf",
        "SUBUNIT/ISU/GEO": "CMghhfI",
        "REVENUEINR-RS": "₹59,06",
        "COSTINR-RS": "₹30,43",
        "GMINR-RSUSD-$": 48.47,
        "REVENUEINR-RS": "₹59",
        "COSTINR-RS": "₹30,43",
        "GMINR-RSUSD-$": 48.47
      }
    },
    {
      "map": {
        "UNIT/SUBUNIT": "hfgh",
        "SUBUNIT/ISU/GEO": "fghh",
        "'APR-16'_REVENUEINR-RS": "₹29,72",
        "'APR-16'_COSTINR-RS": "₹11,43",
        "'APR-16'_GMINR-RSUSD-$": 61.53,
        "'Total'_REVENUEINR-RS": "₹29,72",
        "'Total'_COSTINR-RS": "₹11,43",
        "'Total'_GMINR-RSUSD-$": 61.53
      }
    }
  ]
}
My component's code is shown below:
excelDownload() {
  this._isuGeoSubunitReportService.excelDownload(this.isugeosubunitTO)
    .subscribe(data => this.responseStatus = data,
      err => console.log(err),
      () => console.log('Request Completed222')
    );
  const ws_name = 'SomeSheet';
  const wb: WorkBook = { SheetNames: [], Sheets: {} };
  const ws: any = utils.json_to_sheet(this.responseStatus.dynamoModel);
  wb.SheetNames.push(ws_name);
  wb.Sheets[ws_name] = ws;
  const wbout = write(wb, { bookType: 'xlsx', bookSST: true, type: 'binary' });
  function s2ab(s) {
    const buf = new ArrayBuffer(s.length);
    const view = new Uint8Array(buf);
    for (let i = 0; i !== s.length; ++i) {
      view[i] = s.charCodeAt(i) & 0xFF;
    }
    return buf;
  }
  saveAs(new Blob([s2ab(wbout)], { type: 'application/octet-stream' }), 'exported.xlsx');
}
I am trying to download the result as xlsx, for which I am using the xlsx module. It works fine for simple JSON, but my JSON data is different.
const ws: any = utils.json_to_sheet(this.responseStatus);
If I simply pass this.responseStatus, it returns only one map value in the xlsx.

There is a typo in your code: you are using this.responseStatus.dynamoModel instead of this.responseStatus.dynaModel.
Also, according to the documentation of the xlsx module, the array you parse is expected to look like this before you call utils.json_to_sheet:
[
{S:1,h:2,e:3,e_1:4,t:5,J:6,S_1:7},
{S:2,h:3,e:4,e_1:5,t:6,J:7,S_1:8}
]
So you could use utils.aoa_to_sheet instead of utils.json_to_sheet.
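For example, a minimal sketch of building an array of arrays from your dynaModel and feeding it to utils.aoa_to_sheet (the header keys are taken from your sample JSON and are only illustrative):
const header = ['UNIT/SUBUNIT', 'SUBUNIT/ISU/GEO', 'REVENUEINR-RS', 'COSTINR-RS', 'GMINR-RSUSD-$'];
// one inner array per data row; the first array becomes the header row
const rows = this.responseStatus.dynaModel.map(item => header.map(key => item.map[key]));
const ws = utils.aoa_to_sheet([header, ...rows]);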
Another option: you could write your own function that reshapes your data into the form shown in the documentation for utils.json_to_sheet. This is how I transformed the code in your component:
excelDownload() {
  this.appService.excelDownload()
    .subscribe(data => {
      this.responseStatus = data;
      this.generateExcelFile(data);
    },
    err => console.log(err),
    () => console.log('Request Completed222')
    );
}

generateExcelFile(data: any) {
  this.responseStatus = data;
  const ws_name = 'SomeSheet';
  const wb: WorkBook = { SheetNames: [], Sheets: {} };
  const ws: any = utils.json_to_sheet(parseArray(this.responseStatus.dynaModel));
  wb.SheetNames.push(ws_name);
  wb.Sheets[ws_name] = ws;
  const wbout = write(wb, { bookType: 'xlsx', bookSST: true, type: 'binary' });

  function s2ab(s) {
    const buf = new ArrayBuffer(s.length);
    const view = new Uint8Array(buf);
    for (let i = 0; i !== s.length; ++i) {
      view[i] = s.charCodeAt(i) & 0xFF;
    }
    return buf;
  }

  // function to parse your array coming from the backend
  function parseArray(dataToParse: any) {
    const newArray = [];
    dataToParse.forEach(item => {
      Object.keys(item).forEach(key => {
        newArray.push(item[key]);
      });
    });
    console.log('newArray: ' + JSON.stringify(newArray));
    return newArray;
  }

  FileSaver.saveAs(new Blob([s2ab(wbout)], { type: 'application/octet-stream' }), 'exported.xlsx');
}

Related

How to upload files using Axios when we know the full path?

I am trying to upload a file using Axios, but I want to just use a string file path. With the code below it is working:
<input
  id="select-files"
  style="visibility: hidden"
  type="file"
  multiple
  @change="handleFilesUpload($event)"
/>
But when I tried to use createReadStream it does not work. I wonder how I could convert these file paths to event.target.files.
I already tried the code below, but it does not work:
let data = {
THE_FILE: "",
BRANCH_ID: this.$store.state.starv.localUser.DOCTOR_INFO["BRANCH_ID"],
ACC_NO: this.locationItem["ACC_NO"],
CHART_NO: this.locationItem["CHART_NO"],
EMP_ID: this.$store.state.starv.localUser.DOCTOR_INFO["EMP_ID"],
CO_EMP_ID: this.doctorList.toString(),
ST: "telehealthclient",
NEW_NAME: "",
MAID: LocalData.getComputerId(),
}
/*
Iterate over any file sent over appending the files to the form data.
*/
data["THE_FILE"] = window.fs.createReadStream(filePath)
let bodyFormData = new FormData()
// if (THE_FILE) {
// bodyFormData.append("THE_FILE", THE_FILE)
// }
for (let key in data) {
bodyFormData.append(key, data[key])
}
I already found the solution to this problem. What we need to do is below:
Encode our file to base64:
base64_encode(file) {
// read binary data
let bitmap = window.fs.readFileSync(file);
// convert binary data to base64 encoded string
return new Buffer(bitmap).toString("base64");
},
Create a file-url object from our base64
dataURLtoFile(dataurl, filename) {
const arr = dataurl.split(",");
const mime = arr[0].match(/:(.*?);/)[1];
const bstr = atob(arr[1]);
let n = bstr.length;
const u8arr = new Uint8Array(n);
while (n) {
u8arr[n - 1] = bstr.charCodeAt(n - 1);
n -= 1; // to make eslint happy
}
return new File([u8arr], filename, { type: mime });
},
Create form data with the form-data library.
Upload using Axios as multipart/form-data.
Full code is below:
base64_encode(file) {
// read binary data
let bitmap = window.fs.readFileSync(file);
// convert binary data to base64 encoded string
return new Buffer(bitmap).toString("base64");
},
dataURLtoFile(dataurl, filename) {
const arr = dataurl.split(",");
const mime = arr[0].match(/:(.*?);/)[1];
const bstr = atob(arr[1]);
let n = bstr.length;
const u8arr = new Uint8Array(n);
while (n) {
u8arr[n - 1] = bstr.charCodeAt(n - 1);
n -= 1; // to make eslint happy
}
return new File([u8arr], filename, { type: mime });
},
uploadScreenRecord(data) {
return new Promise((resolve, reject) => {
// #1 Convert to base64 first
let base64_video = this.base64_encode(data.file);
// #2 Create file url object from base64
let filename = path.parse(data.file).base;
let fileURLobject = this.dataURLtoFile(
"data:video/mp4;base64," + base64_video,
filename
);
// #3 Create form data from form-data libary
const formData = new formData_();
formData.append("THE_FILE", fileURLobject, filename);
for (let key in data) {
if (key != "file") {
formData.append(key, data[key]);
}
}
// #4 Send to server
let url;
if (SETTING.imedtacDomain.hostname == undefined) {
url = SETTING.webservice.imedtacProtocol + "//" + defaultDomain;
} else {
url =
SETTING.webservice.imedtacProtocol + "//" + SETTING.imedtacDomain.hostname;
}
axios
.post(url + SETTING.imedtacAPI.uploadFile.URL, formData, {
headers: {
"Access-Control-Allow-Origin": "*",
"Content-Type": "multipart/form-data",
},
timeout: 30000,
})
.then(function (response) {
//Return Patient
console.log("[File debug] This is upload file response %o", response);
if (response.data.success) {
resolve(response.data.FILE_URL);
} else {
reject(response.data.message + " screen record upload error");
}
})
.catch(function (error) {
reject(error);
});
});
},
submitFilesTest() {
return new Promise((resolve, reject) => {
//Data
let data = {
file: "/Users/ivanhutomo/Downloads/B0120221214151850_A.mp4",
BRANCH_ID: "xxx",
ACC_NO: "xx",
CHART_NO: "xx",
EMP_ID: "xx",
CO_EMP_ID: "xx",
ST: "xx",
NEW_NAME: "",
MAID: "xx",
};
this.uploadScreenRecord(data)
.then((response) => {
logger.debug("[File debug] File upload URL %o", response);
resolve(response);
})
.catch((error) => {
logger.debug("[File debug] File %o", error);
reject(error);
});
});
},
submitFilesTest()

Building an Object from fetch statement

I have some code that, when you console.log it, looks like the image below:
The code I am running is as follows:
onClick={() => {
const stream = fetch(
'https://lichess.org/api/games/user/neio',
{ headers: { Accept: 'application/x-ndjson' } }
);
const onMessage = obj => {
console.log('test', obj);
};
const onComplete = () =>
console.log('The stream has completed');
stream.then(readStream(onMessage)).then(onComplete);
}}
export const readStream = processLine => response => {
const stream = response.body.getReader();
const matcher = /\r?\n/;
const decoder = new TextDecoder();
let buf = '';
const loop = () =>
stream.read().then(({ done, value }) => {
if (done) {
if (buf.length > 0) processLine(JSON.parse(buf));
} else {
const chunk = decoder.decode(value, {
stream: true,
});
buf += chunk;
const parts = buf.split(matcher);
buf = parts.pop();
for (const i of parts) processLine(JSON.parse(i));
return loop();
}
});
return loop();
};
export default readStream;
What I am trying to do is build a parent object that contains all these individual rows of data.
I'm new to promises, fetch, etc., so currently I have no idea how to build this parent object that contains each individual row.
Any suggestions?
Can't you have a global array and add items to it like:
var arrCollection = [];
...
const onMessage = obj => {
arrCollection.push(obj);
};
You can have an object with those items doing like:
var objCollection = { items: arrCollection };
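Putting it together, a small sketch of the whole flow (reusing readStream from the question; the items key and the final console.log are only illustrative):
var arrCollection = [];
const onMessage = obj => {
  arrCollection.push(obj); // collect each parsed ndjson row
};
const onComplete = () => {
  // the stream has finished, so every row is in arrCollection now
  var objCollection = { items: arrCollection };
  console.log('parent object', objCollection);
};
fetch('https://lichess.org/api/games/user/neio', { headers: { Accept: 'application/x-ndjson' } })
  .then(readStream(onMessage))
  .then(onComplete);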

JS - StreamSaver download does not start

I download data from an API in chunks, decrypt it, and then pass it to a ReadableStream.
But after the last chunk, the file is not downloaded.
I am working with axios and StreamSaver.js.
Code:
Above in the code I declare:
this.filestream = streamSaver.createWriteStream('sample.jpg');
this.writer = await this.filestream.getWriter();
let readableStream;
readableStream = new ReadableStream({
start(ctrl) {
const nextChunk = async () => {
let fileDataResponse = await that.$api.post(
'endpoint', {
file_id: UUID,
chunk_index: index
}, {
headers: {
...
}
}
);
done =
fileDataResponse.data.length <=
fileDataResponse.data.current_index;
if (fileDataResponse.data.data) {
let data = await that.decryptData(fileDataResponse.data.data);
ctrl.enqueue(data);
}
if (!done) {
index += 1;
nextChunk();
} else {
ctrl.close();
}
};
nextChunk();
}
});
const reader = readableStream.getReader();
const close = () => {
that.writer.close();
};
const pump = () =>
reader.read().then((res) => {
if (!res.done) {
that.writer.write(res.value).then(pump);
} else {
close();
}
});
pump();
Where could my error be here?
Thank you a lot!
The issue was that res.value was not an Int8Array.
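In other words, the decrypted chunk needs to be a typed array before it is enqueued and written. A rough sketch of the fix inside nextChunk, assuming decryptData returns a string or an ArrayBuffer (both assumptions):
let data = await that.decryptData(fileDataResponse.data.data);
// normalize the chunk so the writer always receives a Uint8Array
const chunk = typeof data === 'string' ? new TextEncoder().encode(data) : new Uint8Array(data);
ctrl.enqueue(chunk);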

Convert image path to blob react native

Problem
I am trying to create an app with react native and firebase. One of the features I would like for this app is the ability to upload images. I am having some trouble uploading the images to firebase storage though. I am using expo's image picker to find the path of the image that the user wants to upload, but once I have the path I don't know how to convert that to something I can upload to firebase.
Can somebody help me convert the path of an image to something I can upload to firebase storage with react native?
What I've tried
I tried using:
_pickImage = async () => {
  let result = await ImagePicker.launchImageLibraryAsync({
    MediaTypeOptions: 'Images',
    quality: 0.4,
    base64: true,
  });
  /* if (!result.cancelled) {
    this.setState({ image: result.uri });
    let formData = new FormData();
    formData.append('photo', {
      uri,
      name: `photo.${fileType}`,
      type: `image/${fileType}`,
    });
  } */
  this.uploadImage(result.base64);
};

_uploadAsByteArray = async (pickerResultAsByteArray, progressCallback) => {
  try {
    var metadata = {
      contentType: 'image/jpeg',
    };
    var storageRef = firebase.storage().ref();
    var ref = storageRef.child('images/' + expoID + '/' + this.state.time)
    let uploadTask = ref.put(pickerResultAsByteArray, metadata)
    uploadTask.on('state_changed', function (snapshot) {
      progressCallback && progressCallback(snapshot.bytesTransferred / snapshot.totalBytes)
      var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
      console.log('Upload is ' + progress + '% done');
    }, function (error) {
      console.log("in _uploadAsByteArray ", error)
    }, function () {
      var downloadURL = uploadTask.snapshot.downloadURL;
      console.log("_uploadAsByteArray ", uploadTask.snapshot.downloadURL)
      this.setState({ imageUploaded: true })
    });
  } catch (ee) {
    console.log("when trying to load _uploadAsByteArray ", ee)
  }
}

convertToByteArray = (input) => {
  var binary_string = this.atob(input);
  var len = binary_string.length;
  var bytes = new Uint8Array(len);
  for (var i = 0; i < len; i++) {
    bytes[i] = binary_string.charCodeAt(i);
  }
  return bytes
}

atob = (input) => {
  const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
  let str = input.replace(/=+$/, '');
  let output = '';
  if (str.length % 4 == 1) {
    throw new Error("'atob' failed: The string to be decoded is not correctly encoded.");
  }
  for (let bc = 0, bs = 0, buffer, i = 0;
    buffer = str.charAt(i++);
    ~buffer && (bs = bc % 4 ? bs * 64 + buffer : buffer,
      bc++ % 4) ? output += String.fromCharCode(255 & bs >> (-2 * bc & 6)) : 0
  ) {
    buffer = chars.indexOf(buffer);
  }
  return output;
}

uploadImage(bsfdata) {
  this.setState({ imageUploaded: false })
  this._uploadAsByteArray(this.convertToByteArray(bsfdata), (progress) => {
    this.setState({ progress: progress })
  })
}
I've tried it with the commented code added, which doesn't upload anything, and I've tried it with the code as it is now, which gives me the error "Can currently only create a Blob from other Blobs", and the upload progress never gets above 0%.
If you are using expo (>=26), then you can do it easily with the following lines of code.
uploadImage = async(imageUri) => {
const response = await fetch(imageUri);
const blob = await response.blob();
var ref = firebase.storage().ref().child("image.jpg");
return ref.put(blob);
}
Reference: https://youtu.be/KkZckepfm2Q
Refer to this link - https://github.com/dailydrip/react-native-firebase-storage/blob/master/src/App.js#L43-L69
The following block of code is working fine:
uploadImage(uri, mime = 'application/octet-stream') {
return new Promise((resolve, reject) => {
const uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri
let uploadBlob = null
const imageRef = FirebaseClient.storage().ref('images').child('image_001')
fs.readFile(uploadUri, 'base64')
.then((data) => {
return Blob.build(data, { type: `${mime};BASE64` })
})
.then((blob) => {
uploadBlob = blob
return imageRef.put(blob, { contentType: mime })
})
.then(() => {
uploadBlob.close()
return imageRef.getDownloadURL()
})
.then((url) => {
resolve(url)
})
.catch((error) => {
reject(error)
})
})
}
You need to install the rn-fetch-blob module:
npm install --save rn-fetch-blob
Then, do the following:
import RNFetchBlob from 'rn-fetch-blob';
const Blob = RNFetchBlob.polyfill.Blob;
const fs = RNFetchBlob.fs;
window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
window.Blob = Blob;
function uploadImage(path) {
const imageFile = RNFetchBlob.wrap(path);
// 'path/to/image' is where you wish to put your image in
// the database, if you would like to put it in the folder
// 'subfolder' inside 'mainFolder' and name it 'myImage', just
// replace it with 'mainFolder/subfolder/myImage'
const ref = firebase.storage().ref('path/to/image');
var uploadBlob = null;
Blob.build(imageFile, { type: 'image/jpg;' })
.then((imageBlob) => {
uploadBlob = imageBlob;
return ref.put(imageBlob, { contentType: 'image/jpg' });
})
.then(() => {
uploadBlob.close();
return ref.getDownloadURL();
})
.then((url) => {
// do something with the url if you wish to
})
.catch(() => {
dispatch({
type: UPDATE_PROFILE_INFO_FAIL,
payload: 'Unable to upload profile picture, please try again'
});
});
}
Please do ask if there's any part of the code that you don't understand. To upload multiple images, simply wrap this code in a for loop. Or, if you want to make sure that every image is uploaded without any error, use Promise.all, as sketched below.
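For instance, a rough sketch of waiting for several uploads with Promise.all, assuming uploadImage is changed to return its promise chain (the paths array is only an example):
const paths = ['path/to/image1', 'path/to/image2']; // example image paths
Promise.all(paths.map((p) => uploadImage(p)))
  .then(() => console.log('all images uploaded'))
  .catch(() => console.log('at least one upload failed'));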
Not sure whom this might help, but if you're using MediaLibrary to load images from the gallery, then the uri comes in the format of uri = file:///storage/emulated/0/DCIM/Camera/filename.jpg
In this case, using fetch(uri) didn't help me get the blob.
But if you use fetch(uri.replace("file:///","file:/")) and then follow @sriteja Sugoor's answer, you'll be able to upload the file blob.
const Blob = RNFetchBlob.polyfill.Blob;
const fs = RNFetchBlob.fs;
let uploadBlob;
await fs
.readFile(params?.file.path, 'base64')
.then((data) => {
return Blob.build(data, {type: `BASE64`});
})
.then((blob) => {
uploadBlob = blob;
console.log(uploadBlob, 'uploadBlob');
});
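For completeness, a minimal sketch of the fetch variant described above (the storage path is only an example):
const response = await fetch(uri.replace('file:///', 'file:/'));
const blob = await response.blob();
await firebase.storage().ref().child('images/example.jpg').put(blob); // example storage path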

Why does my code using insertMany() skip some of the records and insert the same records multiple times?

I have 9577 unique records in a CSV file.
This code inserts 9800 records: not all of my records, and duplicates of some of them. Any idea why it does not insert the 9577 unique records, and why it duplicates some of them? Below I also include the remaining part of the code so you get the whole picture.
function bulkImportToMongo(arrayToImport, mongooseModel) {
const Model = require(`../../../models/${mongooseModel}`);
let batchCount = Math.ceil(arrayToImport.length / 100);
console.log(arrayToImport.length);
let ops = [];
for (let i = 0; i < batchCount; i++) {
// console.log(i);
let batch = arrayToImport.slice(i, i + 100);
console.log(batch.length);
ops.push(Model.insertMany(batch));
}
return ops;
return Promise.all(ops).then(results => {
// results is an array of results for each batch
console.log("results: ", results);
});
}
I parse the CSV file:
const Promise = require("bluebird");
const csv = require("fast-csv");
const path = require("path");
const fs = Promise.promisifyAll(require("fs"));
const promiseCSV = Promise.method((filePath, options) => {
return new Promise((resolve, reject) => {
var records = [];
csv
.fromPath(filePath, options)
.on("data", record => {
records.push(record);
})
.on("end", () => {
// console.log(records);
resolve(records);
});
});
});
And here is the script that connects it all together:
const path = require("path");
const promiseCSV = require("./helpers/ImportCSVFiles");
const {
connectToMongo,
bulkImportToMongo
} = require("./helpers/mongoOperations");
const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
delimiter: ";",
noheader: true,
headers: [
"facility",
"partNumber",
"partName",
"partDescription",
"netWeight",
"customsTariff"
]
};
connectToMongo("autoMDM");
promiseCSV(filePath, options).then(records => {
bulkImportToMongo(records, "parts.js");
});
It looks like your issue is simply i++. Perhaps you meant i += 100?
for (let i = 0; i < batchCount; i += 100 /* NOT i++ */) {
  //...
}
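For context, here is why slice(i, i + 100) combined with i++ both duplicates and misses records, plus one non-overlapping alternative (equivalent to the counter-based version in the complete solution below):
// with i++ the slices overlap almost entirely:
// i = 0 -> slice(0, 100)  -> rows 0..99
// i = 1 -> slice(1, 101)  -> rows 1..100 (99 of them again)
// i = 2 -> slice(2, 102)  -> rows 2..101
// ...only the first batchCount + 99 rows are ever touched
for (let i = 0; i < batchCount; i++) {
  const batch = arrayToImport.slice(i * 100, (i + 1) * 100); // non-overlapping batches of 100
  ops.push(Model.insertMany(batch));
}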
I solved it.
I hope this helps others... :-)
I had two errors: one in the function promiseCSV (renamed to parseCSV), and second, bad logic in bulkImportToMongo.
Complete solution:
I parsed and imported 602,198 objects, and here is how long it took using node --max_old_space_size=8000 on a MacBook Pro with 8 GB of RAM.
console
➜ database git:(master) ✗ node --max_old_space_size=8000 partImport.js
Connected to db!
Time to parse file: : 5209.325ms
Disconnected from db!
Time to import parsed objects to db: : 153606.545ms
➜ database git:(master) ✗
parseCSV.js
const csv = require("fast-csv");
function promiseCSV(filePath, options) {
return new Promise((resolve, reject) => {
console.time("Time to parse file");
var records = [];
csv
.fromPath(filePath, options)
.on("data", record => {
records.push(record);
})
.on("end", () => {
console.timeEnd("Time to parse file");
resolve(records);
});
});
}
module.exports = promiseCSV;
mongodb.js
const mongoose = require("mongoose");
mongoose.Promise = global.Promise;
function connectToMongo(databaseName) {
mongoose.connect(`mongodb://localhost:27017/${databaseName}`, {
keepAlive: true,
reconnectTries: Number.MAX_VALUE,
useMongoClient: true
});
console.log("Connected to db!");
}
function disconnectFromMongo() {
mongoose.disconnect();
console.log("Disconnected from db!");
}
function bulkImportToMongo(arrayToImport, mongooseModel) {
const Model = require(`../../../models/${mongooseModel}`);
const batchSize = 100;
let batchCount = Math.ceil(arrayToImport.length / batchSize);
let recordsLeft = arrayToImport.length;
let ops = [];
let counter = 0;
for (let i = 0; i < batchCount; i++) {
let batch = arrayToImport.slice(counter, counter + batchSize);
counter += batchSize;
ops.push(Model.insertMany(batch));
}
return Promise.all(ops);
}
module.exports.bulkImportToMongo = bulkImportToMongo;
module.exports.connectToMongo = connectToMongo;
module.exports.disconnectFromMongo = disconnectFromMongo;
partImport.js
const path = require("path");
const parseCSV = require("./helpers/parseCSV");
const {
connectToMongo,
disconnectFromMongo,
bulkImportToMongo
} = require("./helpers/mongodb");
const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
delimiter: ";",
noheader: true,
headers: [
"facility",
"partNumber",
"partName",
"partDescription",
"netWeight",
"customsTariff"
]
};
connectToMongo("autoMDM");
parseCSV(filePath, options)
.then(records => {
console.time("Time to import parsed objects to db");
return bulkImportToMongo(records, "parts.js");
})
/* .then(result =>
console.log("Total batches inserted: ", result, result.length)
) */
.then(() => {
disconnectFromMongo();
console.timeEnd("Time to import parsed objects to db");
})
.catch(error => console.log(error));
