I'm a total JS noob. I have read a JSON file, filtered out specific items from it, and saved the result to a variable named mapped. How do I export this data to a properly formatted CSV file?
let json = require('./users.json')

let filtered = json.Users.filter((a) => {
  return new Date(a.UserCreateDate) > new Date('2020-05-11T00:00:00.000000+05:30')
})

let mapped = filtered.map((a) => {
  let email
  a.Attributes.forEach(element => {
    if (element.Name == 'email') {
      email = element.Value
    }
  });
  return {
    name: a.Username,
    email: email,
    UserCreateDate: a.UserCreateDate,
    UserStatus: a.UserStatus
  }
})

console.log(JSON.stringify(mapped, null, 4), mapped.length)
Although there are quite a few answers on this topic, I haven't been able to successfully implement any of them.
I assume you want to use the object keys as the header row of the CSV file.
What you need is to open a write stream; a CSV file is just plain text, with commas separating the values and a newline ending each row.
// ...
let mapped = filtered.map((a) => {
  // ...
})
// ...
const fs = require('fs');

let writeStream = fs.createWriteStream('./output.csv');

writeStream.on('open', () => {
  // get the header
  const header = Object.keys(mapped[0]).toString().concat('\n');
  writeStream.write(header);

  mapped.forEach((obj) => {
    const values = Object.values(obj).toString().concat('\n');
    writeStream.write(values);
  });

  writeStream.end();
});
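Note that Object.values(obj).toString() will produce a broken row if any value contains a comma, a double quote, or a newline. If your data can contain those, here is a minimal quoting sketch (escapeCsvValue and toCsvRow are names I made up, not part of any library):

const escapeCsvValue = (value) => {
  // wrap the value in quotes and double any inner quotes, per the usual CSV convention
  const s = String(value ?? '');
  return /[",\n]/.test(s) ? `"${s.replace(/"/g, '""')}"` : s;
};

const toCsvRow = (values) => values.map(escapeCsvValue).join(',') + '\n';

// usage with the objects in mapped:
// writeStream.write(toCsvRow(Object.keys(mapped[0])));
// mapped.forEach((obj) => writeStream.write(toCsvRow(Object.values(obj))));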
I am trying to store data from a CSV file in an array using createReadStream in Node.js, but for some reason the array stays empty and no data is stored in it.
When I console.log the values where I'm trying to push the data, it logs them just fine.
I've tried both csv-parser and csv-parse for this, but neither of them seems to work.
const fs = require("fs");
const csv = require('csv-parser');

function parseCSV () {
  let finaldata = []
  fs.createReadStream('data.csv')
    .pipe(csv())
    .on('data', function (row){
      finaldata.push({Userid: row[0], Timestamp: row[1]})
    }).on('end', function(){
      console.log(finaldata)
    })
}

parseCSV()
I fixed this using the code below.
I kept experimenting until I landed on something that finally worked.
I have no idea why this fix works, but I would love an explanation.
The change I made here is to call the Object constructor instead of using {}, so finaldata.push(Object(row)) instead of finaldata.push({Userid: row[0], Timestamp: row[1]}).
const fs = require("fs");
const csv = require('csv-parser');

const parseCSV = () => {
  let finaldata = []
  fs.createReadStream('data.csv')
    .pipe(csv())
    .on('data', (row) => {
      finaldata.push(Object(row))
    }).on('end', () => {
      console.log(finaldata)
    })
}
parseCSV()
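One likely explanation (my reading of csv-parser's behaviour, not something confirmed in the post): csv-parser emits each row as a plain object keyed by the header names, not as an array, so row[0] and row[1] were undefined; Object(row) just returns the same object unchanged, which is why pushing it works. A small sketch, assuming data.csv has a header row of Userid,Timestamp:

const fs = require("fs");
const csv = require('csv-parser');

fs.createReadStream('data.csv')
  .pipe(csv())
  .on('data', (row) => {
    // row looks like { Userid: '42', Timestamp: '2020-05-11T10:00:00Z' },
    // so access fields by name rather than by index
    console.log(row.Userid, row.Timestamp);
  });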
I'm trying to use stream-json to read a zip, unzip it, and then write it to file. I don't think I understand how to use the library.
Based on the link above, they have this example:
const {chain} = require('stream-chain');
const {parser} = require('stream-json');
const {pick} = require('stream-json/filters/Pick');
const {ignore} = require('stream-json/filters/Ignore');
const {streamValues} = require('stream-json/streamers/StreamValues');
const fs = require('fs');
const zlib = require('zlib');
const pipeline = chain([
  fs.createReadStream('sample.json.gz'),
  zlib.createGunzip(),
  parser(),
  pick({filter: 'data'}),
  ignore({filter: /\b_meta\b/i}),
  streamValues(),
  data => {
    const value = data.value;
    // keep data only for the accounting department
    return value && value.department === 'accounting' ? data : null;
  }
]);

let counter = 0;
pipeline.on('data', () => ++counter);
pipeline.on('end', () =>
  console.log(`The accounting department has ${counter} employees.`));
However, I don't want to count anything; I just want to write to a file. Here is what I have that works:
function unzipJson() {
  const zipPath = Path.resolve(__dirname, 'resources', 'AllPrintings.json.zip');
  const jsonPath = Path.resolve(__dirname, 'resources', 'AllPrintings.json');
  console.info('Attempting to read zip');

  return new Promise((resolve, reject) => {
    let error = null;
    Fs.readFile(zipPath, (err, data) => {
      error = err;
      if (!err) {
        const zip = new JSZip();
        zip.loadAsync(data).then((contents) => {
          Object.keys(contents.files).forEach((filename) => {
            console.info(`Writing ${filename} to disk...`);
            zip.file(filename).async('nodebuffer').then((content) => {
              Fs.writeFileSync(jsonPath, content);
            }).catch((writeErr) => { error = writeErr; });
          });
        }).catch((zipErr) => { error = zipErr; });
        resolve();
      } else if (error) {
        console.log(error);
        reject(error);
      }
    });
  });
}
However, I can't easily add any processing to this, so I wanted to replace it with stream-json. This is my partial attempt, as I don't know how to finish it:
function unzipJson() {
  const zipPath = Path.resolve(__dirname, 'resources', 'myfile.json.zip');
  const jsonPath = Path.resolve(__dirname, 'resources', 'myfile.json');
  console.info('Attempting to read zip');

  const pipeline = chain([
    Fs.createReadStream(zipPath),
    zlib.createGunzip(),
    parser(),
    Fs.createWriteStream(jsonPath),
  ]);

  // use the chain, and save the result to a file
  pipeline.on(/*what goes here?*/)
Later on I intend to add extra processing of the json file(s), but I want to learn the basics before I start throwing in extra functionality.
I can't produce a minimal example unfortunately, as I don't know what goes into the pipeline.on function. I'm trying to understand what I should do, not what I've done wrong.
I also looked at the related stream-chain, which has an example that ends like so:
// use the chain, and save the result to a file
dataSource.pipe(chain).pipe(fs.createWriteStream('output.txt.gz'));
But at no point does the documentation explain where dataSource comes from, and I think my chain creates its own by reading the zip from file?
How am I supposed to use these streaming libraries to write to file?
I don't want to count anything, I just want to write to file
In that case, you'll need to convert the token/JSON data stream back into a text stream that you can write to a file. You can use the library's Stringer for that. Its documentation also contains an example that seems to be more in line with what you want to do:
const {stringer} = require('stream-json/Stringer');

chain([
  fs.createReadStream('data.json.gz'),
  zlib.createGunzip(),
  parser(),
  pick({filter: 'data'}), // omit this if you don't want to do any processing
  stringer(),
  zlib.createGzip(), // omit this if you want to write an unzipped result
  fs.createWriteStream('edited.json.gz')
]);
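If you also want to know when the output file has been fully written (the question asked what to pass to pipeline.on), one option is to keep a separate reference to the write stream and listen for its 'finish' event. A minimal sketch reusing the same requires as above; the file names and the out variable are placeholders:

const out = fs.createWriteStream('edited.json.gz');

chain([
  fs.createReadStream('data.json.gz'),
  zlib.createGunzip(),
  parser(),
  stringer(),
  zlib.createGzip(),
  out,
]);

// 'finish' fires once everything has been flushed to disk
out.on('finish', () => console.log('done writing edited.json.gz'));
out.on('error', (err) => console.error(err));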
I am building an online code editor. Currently, I am storing the source file and input file on my server. Is there any way to store the source files on the client side? I am using UUID to generate a random filename, but I want to keep it as main.c, main.cpp, etc., just like online GDB. But then I cannot have a constant file name, or it will collide when other users are using it.
The code to generate the file:
const fs = require("fs");
const path = require("path");
const { v4: uuid } = require("uuid");

const dirCodes = path.join(__dirname, "codes");
const inputPath = path.join(__dirname, "inputs");

if (!fs.existsSync(dirCodes)) {
  fs.mkdirSync(dirCodes, { recursive: true });
}
if (!fs.existsSync(inputPath)) {
  fs.mkdirSync(inputPath, { recursive: true });
}

const generateFile = async (jobId, format, content, input) => {
  const filename = `${jobId}.${format}`;
  const inputFileName = `${jobId}.txt`;
  const inputFilePath = path.join(inputPath, inputFileName);
  const filepath = path.join(dirCodes, filename);
  await fs.writeFileSync(filepath, content);
  await fs.writeFileSync(inputFilePath, input);
  return [filepath, inputFileName];
};

module.exports = {
  generateFile,
};
I want to keep the file name constant. Also, random file names will not work for Java, as we run the classname.class file.
Please let me know if I can store the file on the client side or in local storage.
Unfortunately, browsers don't give web pages direct filesystem access, as it would be a security risk.
If you want to avoid saving files on the server, you can use localStorage/sessionStorage to persist data across sessions at the client level.
Then, in your case, you'd have a button to "download" the code directory.
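As a sketch of that "download" idea (the downloadFile name and arguments are mine, not from the question): in the browser you can build a Blob from the editor contents and trigger a download through a temporary object URL:

// Hypothetical browser-side helper: offer `content` as a downloadable file
// named `filename` (e.g. "main.c"). This runs in the page, not in Node.
function downloadFile(filename, content) {
  const blob = new Blob([content], { type: 'text/plain' });
  const url = URL.createObjectURL(blob);
  const a = document.createElement('a');
  a.href = url;
  a.download = filename; // the browser saves it under this constant name
  document.body.appendChild(a);
  a.click();
  a.remove();
  URL.revokeObjectURL(url);
}

// usage: downloadFile('main.c', editorContents);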
We have two ways to save data on the client:
1. localStorage [ persists permanently ]
2. sessionStorage [ cleared when the session ends ]
For both, values are stored as strings, so stringify your file contents first.
To save some data: localStorage.setItem('data', JSON.stringify(data))
To retrieve that data: JSON.parse(localStorage.getItem('data'))
The reason we parse is to convert the string back to its original format [ object/array ].
To remove data from storage: localStorage.removeItem('data')
You can use
window.localStorage.setItem(jobId, content);
If the content is a JSON object you can use
window.localStorage.setItem(jobId, JSON.stringify(content));
And retrieve it like this:
let json_object = JSON.parse(localStorage.getItem(jobId));
I am writing a Lambda function that fetches data from DynamoDB and stores it in an array. Now I want to create a CSV file from this array and return it (preferably directly from the Lambda function, rather than uploading it to S3 and then sharing the link). Any idea how to do this?
My code so far:
import AWS from "aws-sdk";
import createError from "http-errors";
import commonMiddleware from "../lib/commonMiddleware";

const dynamodb = new AWS.DynamoDB.DocumentClient();

async function getFile(event, context) {
  const { id: someId } = event.pathParameters;
  let data;

  const params = {
    TableName: process.env.TABLE_NAME,
    IndexName: "GSIsomeId",
    KeyConditionExpression: "someId = :someId",
    ExpressionAttributeValues: {
      ":someId": someId,
    },
  };

  try {
    const result = await dynamodb.query(params).promise();
    data = result.Items;
  } catch (error) {
    console.error(error);
    throw new createError.InternalServerError(error);
  }

  // data is array of objects which I can change to 2d array using Object.values()
  // I want to create and return a CSV from this array
  return {
    statusCode: 200,
    body: JSON.stringify(data),
  };
}
export const handler = commonMiddleware(getFile);
Once you've generated the CSV using any of the approaches mentioned here: Convert JSON array to CSV in Node,
I guess you can try sending back the byte array of the file.
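For example, a minimal sketch of returning the CSV directly from the handler (assuming an API Gateway proxy integration; the toCsv and csvResponse helpers are names I made up, not from the question):

// Hypothetical helper: build a CSV string from an array of flat objects
// (values containing commas would need quoting; omitted here for brevity).
const toCsv = (items) => {
  if (!items.length) return "";
  const header = Object.keys(items[0]).join(",");
  const rows = items.map((obj) => Object.values(obj).join(","));
  return [header, ...rows].join("\n");
};

// Replace the JSON response in getFile with a CSV response:
const csvResponse = (items) => ({
  statusCode: 200,
  headers: {
    "Content-Type": "text/csv",
    "Content-Disposition": 'attachment; filename="data.csv"',
  },
  body: toCsv(items),
});

// usage inside the handler:  return csvResponse(data);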
I have an Excel sheet with a column that has data in JSON format. I want to convert this sheet into JSON. The other two columns get converted fine, but this particular column cannot be treated as a JSON object and instead ends up as a string. JSON.parse() does not work on this string and throws a SyntaxError. Here are the things I have tried:
Convert excel sheet to json using xlsx package
Convert excel to csv to json using csvtojson package
Neither helped me successfully convert the last column into a json object
Below is my code:
let xlsx = require("xlsx")
const csvtojson = require("csvtojson")
let path = require("path")
let fs = require("fs");

const inputFilePath = path.join(__dirname, './mastersheet.xlsx');
let File = xlsx.readFile(inputFilePath);
let content = xlsx.utils.sheet_to_csv(File.Sheets['Sheet6']);

// write the intermediate CSV synchronously so it exists before csvtojson reads it
fs.writeFileSync('./mastersheet.csv', content);

csvtojson()
  .fromFile('./mastersheet.csv')
  .then((jsonObj) => {
    console.log(jsonObj);
    fs.writeFileSync("mastersheet.json", JSON.stringify(jsonObj), 'utf8');
  });
Any help is appreciated.
You can use convert-excel-to-json to convert the Excel sheet to JSON like this:
const excelToJson = require('convert-excel-to-json');
const fs = require('fs');

const result = excelToJson({
  source: fs.readFileSync('exlTest.xlsx'),
  header: {
    rows: 1
  }
});

console.log('result :', result);
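The embedded-JSON column will still come through as a string, so you would parse it per row afterwards. A minimal sketch, assuming the sheet is named 'Sheet6' and the JSON text lives in column C (both names are placeholders for your actual workbook layout); if JSON.parse still throws here, the cell text itself is not valid JSON (e.g. smart quotes), which is a separate data issue:

const excelToJson = require('convert-excel-to-json');
const fs = require('fs');

const result = excelToJson({
  source: fs.readFileSync('mastersheet.xlsx'),
  header: { rows: 1 }
});

// Hypothetical post-processing: parse the cell that holds JSON text.
// 'Sheet6' and the 'C' key are assumptions about your workbook.
const rows = (result['Sheet6'] || []).map((row) => ({
  ...row,
  C: typeof row.C === 'string' ? JSON.parse(row.C) : row.C,
}));

console.log(rows);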