Node.js node-csv module - working with a local CSV file - javascript

I'm trying to use the new version of the node-csv node module to do some CSV manipulation.
node-csv
I've used Perl in the past, but would like to try JavaScript this time. I'm having trouble figuring out how to import a local CSV file instead of using the built-in generator. The documentation for node-csv doesn't show how to do this as far as I can tell (although it does provide an example for the previous version).
Here is the example code, which works as expected.
var csv = require('csv');

var generator = csv.generate({seed: 1, columns: 2, length: 20});
var parser = csv.parse();
var transformer = csv.transform(function(data){
  return data.map(function(value){ return value.toUpperCase(); });
});
var stringifier = csv.stringify();

generator.on('readable', function(){
  while(data = generator.read()){
    parser.write(data);
  }
});
parser.on('readable', function(){
  while(data = parser.read()){
    transformer.write(data);
  }
});
transformer.on('readable', function(){
  while(data = transformer.read()){
    stringifier.write(data);
  }
});
stringifier.on('readable', function(){
  while(data = stringifier.read()){
    process.stdout.write(data);
  }
});
I plan on using the FS module, but am not sure how to pass the local file into the node-csv functions.
var fs = require('fs');
Here is an example for the PREVIOUS version, which uses completely different syntax:
// node samples/sample.js
var csv = require('csv');
var fs = require('fs');

csv()
  .from.stream(fs.createReadStream(__dirname+'/sample.in'))
  .to.path(__dirname+'/sample.out')
  .transform(function(row){
    row.unshift(row.pop());
    return row;
  })
  .on('record', function(row, index){
    console.log('#'+index+' '+JSON.stringify(row));
  })
  .on('end', function(count){
    console.log('Number of lines: '+count);
  })
  .on('error', function(error){
    console.log(error.message);
  });
Any suggestions?
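For what it's worth, here is a minimal sketch of one way the stream-based API from the first example might be wired to a local file. This is untested against the exact node-csv version in question, and the file name is a placeholder:
var fs = require('fs');
var csv = require('csv');

var parser = csv.parse();
var transformer = csv.transform(function(data){
  return data.map(function(value){ return value.toUpperCase(); });
});
var stringifier = csv.stringify();

// Replace the generator with a read stream over the local file
// ('sample.csv' is a hypothetical path).
fs.createReadStream(__dirname + '/sample.csv')
  .pipe(parser)
  .pipe(transformer)
  .pipe(stringifier)
  .pipe(process.stdout);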

Related

How can I render a static HTML file with Handlebars on a Nodejs server?

I have come across plenty of resources online for this but haven't been able to find one that is straightforward enough for me to understand.
At the moment, I have multiple massive <script> tags in an HTML document that hold Handlebars content. The server sends this HTML document to the client, where the client then renders the page with data from an AJAX call. I'd like to move this entire process server-side so that all the server has to do is send a static file and re-render the page when data is updated. Data changes a few times per day - which is why it isn't hard-coded in - and I would like to run the Handlebars compiler on the HTML document when data is updated.
Is it possible to simply put the HTML document with handlebars templating in <script> tags through a function to generate a new HTML file with data filled in?
Here is the code within my app.js file that is run by the Node server; it does not do what I want it to:
function registerHelpers(callback){
  Handlebars.registerHelper('equal', function(lvalue, rvalue, options) {
    if (arguments.length < 3)
      throw new Error("Handlebars Helper equal needs 2 parameters");
    if( lvalue!=rvalue ) {
      return options.inverse(this);
    } else {
      return options.fn(this);
    }
  });
  Handlebars.registerHelper('trim', function(text) {
    text = text.replace(/ /g, '');
    return new Handlebars.SafeString(text);
  });
  callback();
}

function buildHomePage() {
  var source = require(__dirname + '/public/home.handlebars');
  var template = Handlebars.precompile(source);
  var collection = db.get('datalist'); //Monk call to MongoDB
  collection.find({}, function (err, docs){
    var result = template(docs);
    console.log(result)
    var fs = require('fs');
    fs.writeFile("test.html", result, function(err) {
      if(err) {
        console.log(err);
      }
    });
  });
};

registerHelpers(buildHomePage);
The following can render Handlebars to static HTML. Run node example.js. You may need to run npm install --save handlebars first.
var fs = require('fs');
var Handlebars = require('handlebars');

function render(filename, data) {
  var source = fs.readFileSync(filename, 'utf8').toString();
  var template = Handlebars.compile(source);
  var output = template(data);
  return output;
}

var data = JSON.parse(fs.readFileSync("./data/strings.json", 'utf8'));
var result = render('./templates/somefile.html', data);
console.log(result);
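If the goal is a static file rather than console output, the rendered string can then be written to disk with fs (the output path below is just a placeholder):
// Write the rendered HTML to a file (hypothetical path).
fs.writeFileSync('./test.html', result, 'utf8');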
If your Handlebars templates are simple, with only string replacement, you can do this with underscore.js. Assume this example is named 'generate.js'.
var fs = require('fs');
var _ = require('underscore');

_.templateSettings.interpolate = /\{\{(.+?)\}\}/g;

function render(filename, data) {
  var source = fs.readFileSync(filename, 'utf8').toString();
  var compiled = _.template(source);
  return compiled(data);
}

var data = JSON.parse(fs.readFileSync("./data/strings.json", 'utf8'));
var result = render('./templates/somefile.html', data);
console.log(result);
Then run node generate.js to output the rendered template to the console. You may need to run npm install --save underscore first.

csvtojson conversion using Nodejs gives gibberish data

I am trying to convert a CSV file to JSON using the csvtojson converter in Node.js. My code is shown below.
I get output as shown below. I am not sure why this is happening or how to prevent it.
var fs = require('fs');
var Converter = require("csvtojson").Converter;

var fileStream = fs.createReadStream("input.csv");
var converter = new Converter({constructResult:false});

converter.on("end_parsed", function (jsonObj) {
  var jsonfile = require('jsonfile');
  var file = 'output.json';
  jsonfile.writeFile(file, jsonObj, function (err) { console.error(err); });
});

fileStream.pipe(converter);
{"��P\u0000a\u0000c\u0000k\u0000a\u0000g\u0000e\u0000 \u0000N\u0000a\u0000m\u0000e\u0000":"\u0000c\u0000o\u0000m\u0000.\u0000t\u0000r\u0000i\u0000n\u0000e\u0000t\u0000.\u0000h\u0000r\u0000p\u0000m\u0000o\u0000b\u0000i\u0000l\u0000e\u0000","\u0000A\u0000p\u0000p\u0000 \u0000V\u0000e\u0000r\u0000s\u0000i\u0000o\u0000n\u0000 \u0000C\u0000o\u0000d\u0000e\u0000":"\u00006\u00002\u0000","\u0000R\u0000e\u0000v\u0000i\u0000e\u0000w\u0000e\u0000r\u0000 \u0000L\u0000a\u0000n\u0000g\u0000u\u0000a\u0000g\u0000e\u0000":"\u0000e\u0000n\u0000","\u0000D\u0000e\u0000v\u0000i\u0000c\u0000e\u0000":"\u0000","\u0000R\u0000e\u0000v\u0000i\u0000e\u0000w\u0000 \u0000S\u0000u\u0000b\u0000m\u0000i\u0000t\u0000 \u0000D\u0000a\u0000t\u0000e\u0000 \u0000a\u0000n\u0000d\u0000 \u0000T\u0000i\u0000m\u0000e\u0000":"\u00002\u00000\u00001\u00005\u0000-\u00001\u00002\u0000-\u00002\u00002\u0000T\u00000\u00003\u0000:\u00003\u00002\u0000:\u00003\u00008\u0000Z\u0000","\u0000R\u0000e\u0000v\u0000i\u0000e\u0000w\u0000 \u0000S\u0000u\u0000b\u0000m\u0000i\u0000t\u0000 \u0000M\u0000i\u0000l\u0000l\u0000i\u0000s\u0000 \u0000S\u0000i\u0000n\u0000c\u0000e\u0000 \u0000E\u0000p\u0000o\u0000c\u0000h\u0000":"\u00001\u00004\u00005\u00000\u00007\u00005\u00005\u00001\u00005\u00008\u00002\u00006\u00002\u0000","\u0000R\u0000e\u0000v\u0000i\u0000e\u0000w\u0000 \u0000L\u0000a\u0000s\u0000t\u0000 \u0000U\u0000p\u0000d\u0000a\u0000t\u0000e\u0000 \u0000D\u0000a\u0000t\u0000e\u0000 \u0000a\u0000n\u0000d\u0000 \u0000T\u0000i\u0000m\u0000e\u0000":"\u00002\u00000\u00001\u00005\u0000-\u00001\u00002\u0000-\u00002\u00002\u0000T\u00000\u00003\u0000:\u00003\u00002\u0000:\u00003\u00008\u0000Z\u0000","\u0000R\u0000e\u0000v\u0000i\u0000e\u0000w\u0000 \u0000L\u0000a\u0000s\u0000t\u0000 \u0000U\u0000p\u0000d\u0000a\u0000t\u0000e\u0000 \u0000M\u0000i\u0000l\u0000l\u0000i\u0000s\u0000 \u0000S\u0000i\u0000n\u0000c\u0000e\u0000 \u0000E\u0000p\u0000o\u0000c\u0000h\u0000":"\u00001\u00004\u00005\u00000\u00007\u00005\u00005\u00001\u00005\u00008\u00002\u00006\u00002\u0000","\u0000S\u0000t\u0000a\u0000r\u0000 \u0000R\u0000a\u0000t\u0000i\u0000n\u0000g\u0000":"\u00005\u0000","\u0000R\u0000e\u0000v\u0000i\u0000e\u0000w\u0000 \u0000T\u0000i\u0000t\u0000l\u0000e\u0000":"\u0000","\u0000R\u0000e\u0000v\u0000i\u0000e\u0000w\u0000 \u0000T\u0000e\u0000x\u0000t\u0000":"\u0000","\u0000D\u0000e\u0000v\u0000e\u0000l\u0000o\u0000p\u0000e\u0000r\u0000 \u0000R\u0000e\u0000p\u0000l\u0000y\u0000 \u0000D\u0000a\u0000t\u0000e\u0000 \u0000a\u0000n\u0000d\u0000 \u0000T\u0000i\u0000m\u0000e\u0000":"\u0000","\u0000D\u0000e\u0000v\u0000e\u0000l\u0000o\u0000p\u0000e\u0000r\u0000 \u0000R\u0000e\u0000p\u0000l\u0000y\u0000 \u0000M\u0000i\u0000l\u0000l\u0000i\u0000s\u0000 \u0000S\u0000i\u0000n\u0000c\u0000e\u0000 \u0000E\u0000p\u0000o\u0000c\u0000h\u0000":"\u0000","\u0000D\u0000e\u0000v\u0000e\u0000l\u0000o\u0000p\u0000e\u0000r\u0000 \u0000R\u0000e\u0000p\u0000l\u0000y\u0000 \u0000T\u0000e\u0000x\u0000t\u0000":"\u0000","\u0000R\u0000e\u0000v\u0000i\u0000e\u0000w\u0000 \u0000L\u0000i\u0000n\u0000k\u0000":"\u0000"},
{"��P\u0000a\u0000c\u0000k\u0000a\u0000g\u0000e\u0000 \u0000N\u0000a\u0000m\u0000e\u0000":"\u0000"}
I resolved this issue: it was an encoding problem. The correct encoding was 'utf16le':
var csvEncoding = { encoding: 'utf16le' };
var csvString = fs.readFileSync(csvfile, csvEncoding).toString();

converter.fromString(csvString, function(err, result){
  // your code here
  console.log(err);
  console.log(result);
});
I had a very similar issue to OP but was using csv-parse with a file coming out of S3.
Thanks to OP I got on the right path with the encoding issue; I was able to resolve mine by using utf16le in my stream, coupled with iconv-lite, like so:
// assumes e.g.: var iconv = require('iconv-lite'); var parse = require('csv-parse');
s3
  .getObject(getObjectParams)
  .createReadStream()
  .on('end', () => cb(null))
  .pipe(iconv.decodeStream('utf16le'))
  .pipe(parse({ delimiter: '\t', columns: true }))
  .pipe(transformer);
Hopefully this helps others in the same boat!
var Converter = require("csvtojson").Converter;
var fs = require('fs');

var fileStream = fs.createReadStream("input.csv");
var converter = new Converter({constructResult:true});

converter.on("end_parsed", function (jsonObj) {
  var jsonfile = require('jsonfile');
  var file = 'output.json';
  console.log(jsonObj);
  jsonfile.writeFile(file, jsonObj, function (err, result) {
    console.error(err);
    console.log(result);
  });
});

fileStream.pipe(converter);

Unexpected empty writestream in collectionFS using graphicsmagick

I'm using CollectionFS for managing images. Furthermore I'm using graphicsmagick gm() for manipulating images.
Now I want to crop an already saved image. On a click event a server method is called, which does the crop(). But afterwards I find an empty image in the collection, with size=0, updated on the correct date.
I don't see what I am doing wrong.
shared.js
Images = new FS.Collection("images", {
  stores: [
    new FS.Store.FileSystem("thumbnail", {
      transformWrite: function(fileObj, readStream, writeStream) {
        gm(readStream, fileObj.name()).autoOrient().resize('96', '96' + '^').gravity('Center').extent('96', '96').stream().pipe(writeStream);
      }
    }),
    new FS.Store.FileSystem("public"),
  ]
});
server.js
Meteor.methods({
  'crop': function (fileId, selection) {
    var file = Images.findOne({ _id: fileId }),
        read = file.createReadStream('public'),
        write = file.createWriteStream('public');
    gm(read)
      .crop(selection.width, selection.height, selection.left, selection.top)
      .stream()
      .pipe(write);
  }
});
client.js
Template.editor.events({
  'click #crop': function () {
    var fileId = '123456789',
        selection = { height: 100, width: 100, top: 10, left: 10 };
    Meteor.call('crop', fileId, selection);
  }
});
Update
As recommended by Christian, I'm using a tmp file for the writeStream, because the writeStream can't be the same as the readStream; that's what caused the empty result.
But after writing to the tmp file, its content has to be copied back to the public store. How do I do that?
Meteor.methods({
  'crop': function (fileId, selection) {
    var fs = Meteor.npmRequire('fs'),
        file = Images.findOne({ _id: fileId }),
        read = file.createReadStream('public'),
        filename = '/tmp/gm_' + Date.now(),
        tmp = fs.createWriteStream(filename);
    gm(read)
      .crop(selection.width, selection.height, selection.left, selection.top)
      .stream()
      .pipe(tmp);
    // After writing to tmp -> copy back to stream and delete tmp-file
  }
});
Update 2
I tried this one:
// Add temp store
new FS.Store.FileSystem("temp")

// Method
Meteor.methods({
  'crop': function (fileId, selection) {
    var file = Images.findOne({ _id: fileId }),
        read = file.createReadStream('public'),
        temp = file.createWriteStream('temp');
    gm(read)
      .crop(selection.width, selection.height, selection.left, selection.top)
      .stream()
      .pipe(temp)
      .on('end', function () {
        var tmpread = file.createReadStream('temp'),
            write = file.createWriteStream('public');
        gm(tmpread).stream().pipe(write);
      });
  }
});
You can't read from and write to the same file. This is equivalent to things like
cat test | grep 1 > test
on the shell. You can try it and see that test will be empty afterwards.
You need to create an intermediate, temporary file in your crop method.
Assuming that is indeed the problem, then this is one way of doing this (not tested):
var fs = Meteor.npmRequire('fs');
var file = Images.findOne({ _id: fileId });
var read = file.createReadStream('public');
var filename = '/tmp/gm_' + Date.now();
var tmp = fs.createWriteStream(filename);

var gmread = gm(read)
  .crop(selection.width, selection.height, selection.left, selection.top)
  .stream();

gmread.on('end', function() {
  // done streaming through GM, copy the result back:
  var tmpread = fs.createReadStream(filename);
  var write = file.createWriteStream('public');
  tmpread.pipe(write);
});

gmread.pipe(tmp);
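If the temporary file should also be removed afterwards, as the question's update mentions, one possibility (untested, and assuming the CollectionFS write stream behaves like a standard writable and emits 'finish') is:
write.on('finish', function() {
  // Clean up the intermediate temp file once the copy-back completes.
  fs.unlink(filename, function(err) {
    if (err) console.log(err);
  });
});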

Properties are undefined inside of evaluate in PhantomJS

I'm fetching a website's content via PhantomJS, including jQuery with the page. Now I have to write the content to files programmatically. For that I used the following code:
page.onLoadFinished = (function(status) {
  if (status === 'success') {
    page.includeJs('http://ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js', function() {
      page.evaluate(function() {
        var mkdirp = require('mkdirp');
        mkdirp(counter+'_folder', function(err) {
          var html = $('pre[data-language="html"]').html();
          var js = $('pre[data-language="js"]').html();
          var css = $('pre[data-language="css"]').html();
          var fs = require('fs');
          fs.writeFile(counter+"_folder/"+"fiddle.html", html, function(err) {});
          fs.writeFile(counter+"_folder/"+"fiddle.css", css, function(err) {});
          fs.writeFile(counter+"_folder/"+"fiddle.js", js, function(err) {});
          console.log("******* "+counter+" *************");
        });
      });
    });
  }
});
page.open(url[counter]);
Now what happens is that inside the evaluate method, when I use require, the program stops there with the error "cannot find variable require". Any idea why this is appearing?
page.evaluate() is the sandboxed page context. It has no access to require, page, phantom ...
Furthermore, mkdirp is a node module which will not work with PhantomJS. If you want to use PhantomJS from node, you will have to use a bridge like phantom. See also: Use a node module from casperjs
Using that bridge, you have to pass the variables to the outside and save them from there:
page.open(url, function(){
  var mkdirp = require('mkdirp');
  mkdirp(counter+'_folder', function(err) {
    page.evaluate(function() {
      var html = $('pre[data-language="html"]').html();
      var js = $('pre[data-language="js"]').html();
      var css = $('pre[data-language="css"]').html();
      return [html, js, css];
    }, function(result){
      var fs = require('fs');
      fs.writeFile(counter+"_folder/"+"fiddle.html", result[0], function(err) {});
      fs.writeFile(counter+"_folder/"+"fiddle.css", result[1], function(err) {});
      fs.writeFile(counter+"_folder/"+"fiddle.js", result[2], function(err) {});
      console.log("******* "+counter+" *************");
    });
  });
});
Note: PhantomJS' fs module doesn't have a writeFile function. Node and PhantomJS have different execution environments.
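As a side note, if the script runs under plain PhantomJS (no node bridge), a rough sketch of the same idea could use PhantomJS's own fs module and the synchronous return value of page.evaluate. This assumes jQuery is already available on the page (e.g. injected earlier with page.includeJs), and the folder and file names simply mirror the hypothetical ones above:
var fs = require('fs'); // PhantomJS's fs module, not Node's
page.open(url[counter], function() {
  var result = page.evaluate(function() {
    return {
      html: $('pre[data-language="html"]').html(),
      js:   $('pre[data-language="js"]').html(),
      css:  $('pre[data-language="css"]').html()
    };
  });
  fs.makeDirectory(counter + '_folder');
  fs.write(counter + '_folder/fiddle.html', result.html, 'w');
  fs.write(counter + '_folder/fiddle.css',  result.css,  'w');
  fs.write(counter + '_folder/fiddle.js',   result.js,   'w');
  phantom.exit();
});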

How to convert CSV to JSON in Node.js

I am trying to convert a CSV file to JSON. I am using .
Example CSV:
a,b,c,d
1,2,3,4
5,6,7,8
...
Desired JSON:
{"a": 1,"b": 2,"c": 3,"d": 4},
{"a": 5,"b": 6,"c": 7,"d": 8},
...
I tried the node-csv parser library, but the output is an array of arrays, not what I expected.
I'm using Node 0.8 and express.js and would like a recommendation on how to easily accomplish this.
The csvtojson module is a comprehensive Node.js CSV parser. It can be used as a Node.js library, a command-line tool, or in the browser with the help of browserify or webpack.
The source code can be found at: https://github.com/Keyang/node-csvtojson
It is fast, has low memory consumption, and is powerful enough to support most parsing needs, with a rich API and easy-to-read documentation.
The detailed documentation can be found here.
Here are some code examples:
Use it as a library in your Node.js application (csvtojson#2.0.0 +):
Install it through npm
npm install --save csvtojson#latest
Use it in your node.js app:
// require csvtojson
var csv = require("csvtojson");

// Convert a csv file with csvtojson
csv()
  .fromFile(csvFilePath)
  .then(function(jsonArrayObj){ // when parsing is finished, the result is emitted here
    console.log(jsonArrayObj);
  })

// Parse large csv with stream / pipe (low mem consumption)
csv()
  .fromStream(readableStream)
  .subscribe(function(jsonObj){ // a single json object is emitted for each csv line
    // parse each json asynchronously
    return new Promise(function(resolve, reject){
      asyncStoreToDb(jsonObj, function(){ resolve(); })
    })
  })

// Use async / await
const jsonArray = await csv().fromFile(filePath);
Use it as a command-line tool:
sh# npm install csvtojson
sh# ./node_modules/csvtojson/bin/csvtojson ./youCsvFile.csv
-or-
sh# npm install -g csvtojson
sh# csvtojson ./yourCsvFile.csv
For advanced usage:
sh# csvtojson --help
You can find more details from the github page above.
You can try to use underscore.js
First convert the lines into arrays using the toArray function:
var letters = _.toArray(a,b,c,d);
var numbers = _.toArray(1,2,3,4);
Then zip the arrays together using the object function:
var json = _.object(letters, numbers);
By then, the json var should contain something like:
{"a": 1,"b": 2,"c": 3,"d": 4}
Had to do something similar, hope this helps.
// Node packages for file system
var fs = require('fs');
var path = require('path');

var filePath = path.join(__dirname, 'PATH_TO_CSV');

// Read CSV (readFileSync is synchronous, so no callback is needed)
var f = fs.readFileSync(filePath, {encoding: 'utf-8'});

// Split on row
f = f.split("\n");

// Get first row for column headers
var headers = f.shift().split(",");

var json = [];
f.forEach(function(d){
  // Loop through each row
  var tmp = {};
  var row = d.split(",");
  for(var i = 0; i < headers.length; i++){
    tmp[headers[i]] = row[i];
  }
  // Add object to list
  json.push(tmp);
});

var outPath = path.join(__dirname, 'PATH_TO_JSON');
// Convert object to string, write json to file
fs.writeFileSync(outPath, JSON.stringify(json), 'utf8');
Here is a solution that does not require a separate module. However, it is very crude, and does not implement much error handling. It could also use more tests, but it will get you going. If you are parsing very large files, you may want to seek an alternative. Also, see this solution from Ben Nadel.
Node Module Code, csv2json.js:
/*
 * Convert a CSV String to JSON
 */
exports.convert = function(csvString) {
  var json = [];
  var csvArray = csvString.split("\n");

  // Remove the column names from csvArray into csvColumns.
  // Also replace single quote with double quote (JSON needs double).
  var csvColumns = JSON
      .parse("[" + csvArray.shift().replace(/'/g, '"') + "]");

  csvArray.forEach(function(csvRowString) {
    var csvRow = csvRowString.split(",");

    // Here we work on a single row.
    // Create an object with all of the csvColumns as keys.
    var jsonRow = new Object();
    for (var colNum = 0; colNum < csvRow.length; colNum++) {
      // Remove beginning and ending quotes since stringify will add them.
      var colData = csvRow[colNum].replace(/^['"]|['"]$/g, "");
      jsonRow[csvColumns[colNum]] = colData;
    }
    json.push(jsonRow);
  });
  return JSON.stringify(json);
};
Jasmine Test, csv2jsonSpec.js:
var csv2json = require('csv2json.js');

var CSV_STRING = "'col1','col2','col3'\n'1','2','3'\n'4','5','6'";
var JSON_STRING = '[{"col1":"1","col2":"2","col3":"3"},{"col1":"4","col2":"5","col3":"6"}]';

/* jasmine specs for csv2json */
describe('csv2json', function() {
  it('should convert a csv string to a json string.', function() {
    expect(csv2json.convert(CSV_STRING)).toEqual(JSON_STRING);
  });
});
If you want just a command-line converter, the quickest and cleanest solution for me is to use csvtojson via npx (included with Node.js by default):
$ npx csvtojson ./data.csv > data.json
Using ES6
const toJSON = csv => {
  const lines = csv.split('\n')
  const result = []
  const headers = lines[0].split(',')

  lines.map(l => {
    const obj = {}
    const line = l.split(',')

    headers.map((h, i) => {
      obj[h] = line[i]
    })
    result.push(obj)
  })

  return JSON.stringify(result)
}

const csv = `name,email,age
francis,francis#gmail.com,33
matty,mm#gmail.com,29`

const data = toJSON(csv)
console.log(data)
Output
// [{"name":"name","email":"email","age":"age"},{"name":"francis","email":"francis#gmail.com","age":"33"},{"name":"matty","email":"mm#gmail.com","age":"29"}]
Using lodash:
function csvToJson(csv) {
  const content = csv.split('\n');
  const header = content[0].split(',');
  return _.tail(content).map((row) => {
    return _.zipObject(header, row.split(','));
  });
}
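A quick usage sketch, assuming lodash is available as _ and using an inline CSV string as example input:
const _ = require('lodash');

const json = csvToJson('a,b\n1,2\n3,4');
console.log(json); // [ { a: '1', b: '2' }, { a: '3', b: '4' } ]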
I haven't tried the csv package (https://npmjs.org/package/csv), but according to the documentation it looks like a quality implementation: http://www.adaltas.com/projects/node-csv/
I started with node-csvtojson, but it brought too many dependencies for my liking.
Building on your question and the answer by brnd, I used node-csv and underscore.js.
var attribs;
var json;

csv()
  .from.string(csvString)
  .transform(function(row) {
    if (!attribs) {
      attribs = row;
      return null;
    }
    return row;
  })
  .to.array(function(rows) {
    json = _.map(rows, function(row) {
      return _.object(attribs, row);
    });
  });
I have a very simple solution to just print JSON from CSV on the console using the csvtojson module.
// require csvtojson
var csv = require("csvtojson");

const csvFilePath = 'customer-data.csv' // file path of csv

csv()
  .fromFile(csvFilePath)
  .then((jsonObj) => {
    console.log(jsonObj);
  })
I have used the csvtojson library for converting a CSV string to a JSON array.
It has a variety of functions which can help you convert to JSON.
It also supports reading from a file and file streaming.
Be careful while parsing CSV that can contain a comma (,) or any other delimiter.
For handling the delimiter, please see my answer here.
Step 1:
Install node module:
npm install csvtojson --save
Step 2:
var Converter = require("csvtojson").Converter;
var converter = new Converter({});

converter.fromFile("./path-to-your-file.csv", function(err, result){
  if(err){
    console.log("Error");
    console.log(err);
  }
  var data = result;
  // to check json
  console.log(data);
});
The node-etl package is enough for all BI processing.
npm install node-etl
Then:
var ETL = require('node-etl');
var output = ETL.extract('./data.csv', {
  headers: ["a", "b", "c", "d"],
  ignore: (line, index) => index !== 0, // ignore first line
});
My buddy and I created a web service to handle this kind of thing.
Check out Modifly.co for instructions on how to transform CSV to JSON with a single RESTful call.
Use the csv parser library; I explain in more detail how to use it here.
var csv = require('csv');

// csvText holds the CSV contents, e.g. fs.readFileSync('./data.csv', 'utf8')
csv.parse(csvText, {columns: true}, function(err, data){
  console.log(JSON.stringify(data, null, 2));
});
npm install csvjson --save
In your Node.js file:
const csvjson = require('csvjson');

const convertCSVToJSON = (file) => {
  const convertedObj = csvjson.toObject(file);
  return convertedObj;
};

convertCSVToJSON(/* your .csv, as in the original *.csv placeholder */);
The csvtojson module is a comprehensive Node.js CSV parser for converting CSV to JSON or column arrays. It can be used as a Node.js library, a command-line tool, or in the browser. Below is a short example:
/** csv file
a,b,c
1,2,3
4,5,6
*/
const csvFilePath = '<path to csv file>'
const csv = require('csvtojson')

csv()
  .fromFile(csvFilePath)
  .then((jsonObj) => {
    console.log(jsonObj);
    /**
     * [
     *   {a:"1", b:"2", c:"3"},
     *   {a:"4", b:"5", c:"6"}
     * ]
     */
  })

// Async / await usage
const jsonArray = await csv().fromFile(csvFilePath);
I converted a large (315 MB) csv file to json by installing the csvtojson module and then using the below code:
const fs = require('fs')
const Converter = require('csvtojson').Converter

const csvConverter = new Converter({
  constructResult: false,
  downstreamFormat: "array",
})

csvConverter.subscribe = function(json, row, index){
  json["rowIndex"] = index
};

const readStream = fs.createReadStream('./data.csv')     // my csv file
const writeStream = fs.createWriteStream('./data.json')  // my new json file

readStream.pipe(csvConverter).pipe(writeStream)
The resulting json file is in the desired format:
[
{"a": 1,"b": 2,"c": 3,"d": 4},
{"a": 5,"b": 6,"c": 7,"d": 8},
]
Once you have figured out how to get the CSV data into a two-dimensional array:
[['header1','header2'],['data1','data2']]
converting to JSON is simply a map and reduce:
const keys = input[0]
const jsonOutput = input.slice(1)
  .map(arr2 => keys.reduce((accumulator, element, index) => {
    return {
      ...accumulator,
      [element]: arr2[index]
    };
  }, {}))
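For example, with an input like the following, the snippet above yields one object per data row:
const input = [['a', 'b'], ['1', '2'], ['3', '4']];
// jsonOutput === [ { a: '1', b: '2' }, { a: '3', b: '4' } ]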
In my case JSON.stringify didn't help, as the files were too big.
This solved my needs:
// readFileSync is synchronous, so no callback is needed
let csvFile = fs.readFileSync(csvFilePath, { encoding: "utf-8" });

csvFile = csvFile.split("\n");

let strFile = "export default [";
csvFile.forEach(function (d) {
  let row = d.split(",");
  strFile += `[${row}],`;
});
strFile += "]";
