NodeJS Logger: Remove trailing newline - javascript

I'm trying to build a custom NodeJS logger that logs data to a file.
To append data, I use fs.createWriteStream with the a flag.
var fs = require("fs");
var stream = fs.createWriteStream("./test.log", {
    encoding: "utf-8",
    flags: "a",
    autoClose: true
});

// Data to log to file
var data = {
    timestamp: new Date().toJSON(),
    message: "OK"
};

// Write data
stream.write(JSON.stringify(data) + "\n");
Logging a few times results in a file looking like this:
{"timestamp":"2020-08-25T17:45:27.733Z","message":"OK"}
{"timestamp":"2020-08-25T17:45:34.820Z","message":"OK"}
{"timestamp":"2020-08-25T17:45:41.142Z","message":"OK"}
(There's a trailing newline here; Stack Overflow strips it.)
My problem is that I don't know how to remove the trailing newline.
I thought about adding the newline before each log entry instead, but this requires detecting whether I'm writing at the beginning of the file (I didn't find a way to do this).
What's the best way to remove the trailing newline, if that is even possible?
Any help would be greatly appreciated!

You need to check if the file is empty:
var fs = require("fs");
var stream = fs.createWriteStream("./test.log", {
    encoding: "utf-8",
    flags: "a",
    autoClose: true
});

// Data to log to file
var data = {
    timestamp: new Date().toJSON(),
    message: "OK"
};

// Check if file is empty
var stats = fs.statSync("./test.log");
var isEmpty = stats.size === 0;

// Write data
stream.write((isEmpty ? "" : "\n") + JSON.stringify(data));
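If you'd rather avoid the synchronous stat call (and the race where the log file may not exist yet when statSync runs), the same check works with the asynchronous API. A minimal sketch, assuming Node 10+ for fs.promises; the log helper name is mine, not from the question:

var { promises: fsp } = require("fs");

async function log(data) {
    // Treat a missing file the same as an empty one.
    var isEmpty = await fsp.stat("./test.log")
        .then(function (stats) { return stats.size === 0; })
        .catch(function () { return true; });
    await fsp.appendFile("./test.log", (isEmpty ? "" : "\n") + JSON.stringify(data), "utf-8");
}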

Related

How can I parse a YAML file to read the comments as well using node.js?

I am trying to parse a YAML file. I was able to parse the file properly but the comments in the YAML file are not getting read. Is there any way to do it? Attaching the parser code and config.json. Also attaching the screenshot of the file and output for reference.
var fs = require('fs');
var path = require('path');
var yaml = require('js-yaml');

var fname = "config.json";
var jPath = path.join(__dirname, "..", "ConfigGen", "Config", fname);
var jsString = fs.readFileSync(jPath, 'utf8');

// Get path for files from Config file
var tType = "cto"; // Get this from input
var pth = JSON.parse(jsString)[tType]; // perform error handling
var cType = "jbod"; // Get this from input

// Use that path
fs.readdir(pth, function (err, files) {
    files.forEach(function (file) {
        var fName = cType + "_" + tType + "_uut.yaml-example";
        if (file == fName) {
            var flContent = fs.readFileSync(path.join(pth, file), "utf8");
            // return path.join from here and use the next part in a separate function
            var data = yaml.safeLoad(flContent)[0][0];
            console.log(data);
            for (var index in data) {
                var prefix = index;
                for (var idx in data[index]) {
                    //console.log(prefix, idx, data[prefix][idx])
                }
            }
        }
    });
});
Reiterating flyx's comment, according to the YAML spec on comments:
Comments are a presentation detail and must not be used to convey content information.
So, assuming you're not going to be able to correlate the comments to any adjacent fields, you can just read the whole file as a string and match any characters after a #.
You can read the file and pull out the comments with a regex like this:
var { promises: fs } = require('fs');

(async () => {
    let path = "./config.yaml";
    let file = await fs.readFile(path, "utf8");
    let matches = file.matchAll(/#.*/g);
    let comments = [...matches].map(m => m[0]);
    console.log(comments);
})();
If you have a yaml file that looks like this:
# block comment
env: dev
prop: value # inline comment
It will log the following:
[ '# block comment', '# inline comment' ]
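If you also need to know where each comment sits, a variation on the same idea records line numbers. This is a sketch with the same caveat as the regex above: a # inside a quoted YAML string would be a false positive.

var { promises: fs } = require('fs');

(async () => {
    let file = await fs.readFile("./config.yaml", "utf8");
    // Pair each comment with its 1-based line number.
    let comments = file.split("\n").flatMap((line, i) => {
        let match = line.match(/#.*/);
        return match ? [{ line: i + 1, comment: match[0] }] : [];
    });
    console.log(comments);
    // [ { line: 1, comment: '# block comment' }, { line: 3, comment: '# inline comment' } ]
})();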

How to read special characters from .csv files in JavaScript

I want to read .csv files which contain special characters (Polish language).
I'm using ExcelJs to read .csv:
var workbook = new Excel.Workbook();
workbook.csv.readFile(uploadsPath + "/" + filename, { delimiter: ';' })
    .then(function (worksheet) {
        var worksheet = workbook.getWorksheet(1);
        console.log(worksheet.getRow(3).getCell(7).value);
    });
With this code I'm getting "Wroc�aw" instead of "Wrocław".
I tried using encoding:
var workbook = new Excel.Workbook();
workbook.csv.readFile(uploadsPath + "/" + filename, { encoding: 'utf-16le' })
    .then(function (worksheet) {
        var worksheet = workbook.getWorksheet(1);
        console.log(worksheet.getRow(3).getCell(7).value);
    });
But then I'm getting this error:
TypeError [ERR_INVALID_ARG_TYPE]: The "buf" argument must be one of type Buffer, TypedArray, or DataView. Received type object
How to deal with it?
OK, I found a simple solution.
I created this function:
var fs = require("fs");
var iconv = require("iconv-lite");

function changeEncoding(path) {
    var buffer = fs.readFileSync(path);
    var output = iconv.encode(iconv.decode(buffer, "win1250"), "utf-8");
    fs.writeFileSync(path, output);
}
It simply reads the file and, with the help of iconv-lite, first decodes it from win1250 and then saves the file with UTF-8 encoding.
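Putting it together with the ExcelJS code from the question (a sketch: uploadsPath and filename come from the question, and it assumes the source file really is Windows-1250 encoded):

var csvPath = uploadsPath + "/" + filename;
// Rewrite the file as UTF-8 in place, then let ExcelJS read it normally.
changeEncoding(csvPath);
var workbook = new Excel.Workbook();
workbook.csv.readFile(csvPath, { delimiter: ';' })
    .then(function () {
        console.log(workbook.getWorksheet(1).getRow(3).getCell(7).value);
    });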
First, I think ł is a UTF-8 character.
Try printing it in the browser; it may be the console that makes it look like this.

remove a string from a file using nodejs

I am trying to remove a string from text.txt. The text.txt file contains strings in the following format:
text/more/more.txt
text/home.txt
text/more/yoo/yoo.txt
text/about.txt
Now what I am doing is watching a folder, and when any of the files listed above, let's say text/about.txt, is deleted, then text.txt should automatically be updated to the following:
text/more/more.txt
text/home.txt
text/more/yoo/yoo.txt
For this I am using the hound module to watch for the delete event, and the replace module to remove the deleted path from text.txt. Below is my code:
watcher.on('delete', function (file, stats) {
    replace({
        regex: /file/g, // file is something like text/about.txt
        replacement: '',
        paths: [path + '/text.txt'],
        recursive: true,
        silent: true,
    });
});
But my code above does not remove the particular string (the value of file) from text.txt. How can I solve this?
UPDATE
file in the above code has the value text/about.txt.
This is an error in semantics; you misinterpreted what happens when you do this:
watcher.on('delete', function (file, stats) {
    ...
    regex: /file/g, // file is something like text/about.txt
    ...
}
Here, the regex literal /file/g matches the literal string "file", not the contents of the file variable you're passing into the function. Do this instead:
regex: new RegExp(file, 'g'), // file is something like text/about.txt
See RegExp for more details.
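One caveat: a path like text/about.txt contains regex metacharacters (the dot), so new RegExp(file, 'g') would also match strings like text/aboutXtxt. It's safer to escape the path first. A sketch, where escapeRegExp is a hypothetical helper rather than part of the replace module:

// Backslash-escape all regex metacharacters in the path.
function escapeRegExp(str) {
    return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

watcher.on('delete', function (file, stats) {
    replace({
        regex: new RegExp(escapeRegExp(file), 'g'),
        replacement: '',
        paths: [path + '/text.txt'],
        recursive: true,
        silent: true,
    });
});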
I have updated the search_content and replace_content variables to handle special characters as well, and then used the fs module to replace all matching strings in the file. You can also run a synchronous loop over several files and replace strings using callbacks.
var fs = require('fs');

var search_content = "file";
var replace_content = '';
var source_file_path = '<<source file path where string needs to be replaced>>';

// Escape regex special characters in the search string.
search_content = search_content.replace(/([.?&;*+^$[\]\\(){}|-])/g, "\\$1");
search_content = new RegExp(search_content, "g");

fs.readFile(source_file_path, 'utf8', function (rfErr, rfData) {
    if (rfErr) {
        // show error
    }
    var fileData = rfData.toString();
    fileData = fileData.replace(search_content, replace_content);
    fs.writeFile(source_file_path, fileData, 'utf8', function (wfErr) {
        if (wfErr) {
            // show error
        }
        // callback goes from here
    });
});

How to convert CSV to JSON in Node.js

I am trying to convert a csv file to json. I am using .
Example CSV:
a,b,c,d
1,2,3,4
5,6,7,8
...
Desired JSON:
{"a": 1,"b": 2,"c": 3,"d": 4},
{"a": 5,"b": 6,"c": 7,"d": 8},
...
I tried the node-csv parser library, but the output is an array, not what I expected.
I'm using Node 0.8 and express.js and would like a recommendation on how to easily accomplish this.
The Node.js csvtojson module is a comprehensive CSV parser. It can be used as a Node.js library, a command-line tool, or in the browser with the help of browserify or webpack.
The source code can be found at: https://github.com/Keyang/node-csvtojson
It is fast, with low memory consumption, yet powerful enough to support most parsing needs with an abundant API and easy-to-read documentation.
The detailed documentation can be found here.
Here are some code examples:
Use it as a library in your Node.js application (csvtojson@2.0.0+):
Install it through npm:
npm install --save csvtojson@latest
Use it in your Node.js app:
// require csvtojson
var csv = require("csvtojson");

// Convert a csv file with csvtojson
csv()
    .fromFile(csvFilePath)
    .then(function (jsonArrayObj) { // when parse finished, result will be emitted here
        console.log(jsonArrayObj);
    })

// Parse large csv with stream / pipe (low mem consumption)
csv()
    .fromStream(readableStream)
    .subscribe(function (jsonObj) { // single json object will be emitted for each csv line
        // parse each json asynchronously
        return new Promise(function (resolve, reject) {
            asyncStoreToDb(jsonObj, function () { resolve(); });
        });
    })

// Use async / await
const jsonArray = await csv().fromFile(filePath);
Use it as a command-line tool:
sh# npm install csvtojson
sh# ./node_modules/csvtojson/bin/csvtojson ./yourCsvFile.csv
-or-
sh# npm install -g csvtojson
sh# csvtojson ./yourCsvFile.csv
For advanced usage:
sh# csvtojson --help
You can find more details on the GitHub page above.
You can try using underscore.js.
First convert the lines into arrays using the toArray function:
var letters = _.toArray(['a', 'b', 'c', 'd']);
var numbers = _.toArray([1, 2, 3, 4]);
Then zip the arrays together using the object function:
var json = _.object(letters, numbers);
By then, the json var should contain something like :
{"a": 1,"b": 2,"c": 3,"d": 4}
Had to do something similar, hope this helps.
// Node packages for file system
var fs = require('fs');
var path = require('path');

var filePath = path.join(__dirname, 'PATH_TO_CSV');

// Read CSV (readFileSync is synchronous and throws on error, so no callback is needed)
var f = fs.readFileSync(filePath, { encoding: 'utf-8' });

// Split on row
f = f.split("\n");

// Get first row for column headers
var headers = f.shift().split(",");

var json = [];
f.forEach(function (d) {
    // Loop through each row
    var tmp = {};
    var row = d.split(",");
    for (var i = 0; i < headers.length; i++) {
        tmp[headers[i]] = row[i];
    }
    // Add object to list
    json.push(tmp);
});

var outPath = path.join(__dirname, 'PATH_TO_JSON');
// Convert object to string, write json to file (writeFileSync is also synchronous)
fs.writeFileSync(outPath, JSON.stringify(json), 'utf8');
Here is a solution that does not require a separate module. However, it is very crude, and does not implement much error handling. It could also use more tests, but it will get you going. If you are parsing very large files, you may want to seek an alternative. Also, see this solution from Ben Nadel.
Node Module Code, csv2json.js:
/*
 * Convert a CSV String to JSON
 */
exports.convert = function (csvString) {
    var json = [];
    var csvArray = csvString.split("\n");

    // Remove the column names from csvArray into csvColumns.
    // Also replace single quote with double quote (JSON needs double).
    var csvColumns = JSON
        .parse("[" + csvArray.shift().replace(/'/g, '"') + "]");

    csvArray.forEach(function (csvRowString) {
        var csvRow = csvRowString.split(",");

        // Here we work on a single row.
        // Create an object with all of the csvColumns as keys.
        var jsonRow = {};
        for (var colNum = 0; colNum < csvRow.length; colNum++) {
            // Remove beginning and ending quotes since stringify will add them.
            var colData = csvRow[colNum].replace(/^['"]|['"]$/g, "");
            jsonRow[csvColumns[colNum]] = colData;
        }
        json.push(jsonRow);
    });
    return JSON.stringify(json);
};
Jasmine Test, csv2jsonSpec.js:
var csv2json = require('./csv2json.js');

var CSV_STRING = "'col1','col2','col3'\n'1','2','3'\n'4','5','6'";
var JSON_STRING = '[{"col1":"1","col2":"2","col3":"3"},{"col1":"4","col2":"5","col3":"6"}]';

/* jasmine specs for csv2json */
describe('csv2json', function () {
    it('should convert a csv string to a json string.', function () {
        expect(csv2json.convert(CSV_STRING)).toEqual(JSON_STRING);
    });
});
If you want just a command-line converter, the quickest and cleanest solution for me is to use csvtojson via npx (included by default with Node.js):
$ npx csvtojson ./data.csv > data.json
Using ES6
const toJSON = csv => {
    const lines = csv.split('\n')
    const result = []
    const headers = lines[0].split(',')
    // Skip the header row, then map each data line onto the headers
    lines.slice(1).map(l => {
        const obj = {}
        const line = l.split(',')
        headers.map((h, i) => {
            obj[h] = line[i]
        })
        result.push(obj)
    })
    return JSON.stringify(result)
}
const csv = `name,email,age
francis,francis#gmail.com,33
matty,mm#gmail.com,29`
const data = toJSON(csv)
console.log(data)
Output
// [{"name":"name","email":"email","age":"age"},{"name":"francis","email":"francis#gmail.com","age":"33"},{"name":"matty","email":"mm#gmail.com","age":"29"}]
Using lodash:
function csvToJson(csv) {
    const content = csv.split('\n');
    const header = content[0].split(',');
    return _.tail(content).map((row) => {
        return _.zipObject(header, row.split(','));
    });
}
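A quick usage check (assuming lodash is loaded as _):

const _ = require('lodash');
console.log(csvToJson('a,b,c,d\n1,2,3,4\n5,6,7,8'));
// [ { a: '1', b: '2', c: '3', d: '4' }, { a: '5', b: '6', c: '7', d: '8' } ]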
I haven't tried the csv package (https://npmjs.org/package/csv), but according to the documentation it looks like a quality implementation: http://www.adaltas.com/projects/node-csv/
I started with node-csvtojson, but it brought too many dependencies for my liking.
Building on your question and the answer by brnd, I used node-csv and underscore.js.
var attribs;
var json;

csv()
    .from.string(csvString)
    .transform(function (row) {
        if (!attribs) {
            attribs = row;
            return null;
        }
        return row;
    })
    .to.array(function (rows) {
        json = _.map(rows, function (row) {
            return _.object(attribs, row);
        });
    });
I have a very simple solution to just print json from csv on the console using the csvtojson module.
// require csvtojson
var csv = require("csvtojson");

const csvFilePath = 'customer-data.csv' // file path of csv
csv()
    .fromFile(csvFilePath)
    .then((jsonObj) => {
        console.log(jsonObj);
    })
I have used the csvtojson library for converting a csv string to a json array.
It has a variety of functions which can help you convert CSV to JSON.
It also supports reading from a file and file streaming.
Be careful when parsing csv that can contain commas (,) or any other delimiter.
For removing the delimiter, please see my answer here.
Step 1:
Install node module:
npm install csvtojson --save
Step 2:
var Converter = require("csvtojson").Converter;
var converter = new Converter({});

converter.fromFile("./path-to-your-file.csv", function (err, result) {
    if (err) {
        console.log("Error");
        console.log(err);
    }
    var data = result;
    // to check json
    console.log(data);
});
The node-etl package is enough for all BI processing.
npm install node-etl
Then:
var ETL = require('node-etl');
var output = ETL.extract('./data.csv', {
    headers: ["a", "b", "c", "d"],
    ignore: (line, index) => index !== 0, // ignore first line
});
My buddy and I created a web service to handle this kind of thing.
Check out Modifly.co for instructions on how to transform CSV to JSON with a single RESTful call.
Use the csv parser library; I explain in more detail how to use it here.
var csv = require('csv');

csv.parse(csvText, { columns: true }, function (err, data) {
    console.log(JSON.stringify(data, null, 2));
});
npm install csvjson --save
In your Node.js file:
const csvjson = require('csvjson');

const convertCSVToJSON = (file) => {
    const convertedObj = csvjson.toObject(file);
    return convertedObj;
};

convertCSVToJSON('*.csv');
The csvtojson module is a comprehensive Node.js CSV parser for converting CSV to JSON or column arrays. It can be used as a Node.js library, a command-line tool, or in the browser. Here is an example:
/** csv file
a,b,c
1,2,3
4,5,6
*/
const csvFilePath = '<path to csv file>'
const csv = require('csvtojson')

csv()
    .fromFile(csvFilePath)
    .then((jsonObj) => {
        console.log(jsonObj);
        /**
         * [
         *   { a: "1", b: "2", c: "3" },
         *   { a: "4", b: "5", c: "6" }
         * ]
         */
    })

// Async / await usage
const jsonArray = await csv().fromFile(csvFilePath);
I converted a large (315 MB) csv file to json by installing the csvtojson module and then using the below code:
const fs = require('fs')
const Converter = require('csvtojson').Converter

const csvConverter = new Converter({
    constructResult: false,
    downstreamFormat: "array",
})

csvConverter.subscribe = function (json, row, index) {
    json["rowIndex"] = index
};

const readStream = fs.createReadStream('./data.csv') // my csv file
const writeStream = fs.createWriteStream('./data.json') // my new json file

readStream.pipe(csvConverter).pipe(writeStream)
The resulting json file is in the desired format:
[
{"a": 1,"b": 2,"c": 3,"d": 4},
{"a": 5,"b": 6,"c": 7,"d": 8},
]
Once you've figured out how to parse the CSV data into a two-dimensional array:
[['header1','header2'],['data1','data2']]
converting to JSON is simply a map and reduce:
const keys = input[0]
const jsonOutput = input.slice(1)
    .map(arr2 => keys.reduce((accumulator, element, index) => {
        return {
            ...accumulator,
            [element]: arr2[index]
        };
    }, {}))
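For simple unquoted CSV, the two-dimensional input array itself can come from a naive split. A sketch, where csvString is assumed to hold the raw file contents; quoted fields containing commas would need a real parser:

const input = csvString
    .trim()
    .split('\n')
    .map(line => line.split(','));
// 'a,b\n1,2' -> [ ['a', 'b'], ['1', '2'] ]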
In my case JSON.stringify didn't help, as the files were too big.
This solved my needs:
let csvFile = fs.readFileSync(csvFilePath, { encoding: "utf-8" });
csvFile = csvFile.split("\n");

let strFile = "export default [";
csvFile.forEach(function (d) {
    let row = d.split(",");
    strFile += `[${row}],`;
});
strFile += "]";

How to append to New Line in Node.js

I'm trying to append data to a log file using Node.js, and that is working fine, but it is not going to the next line. \n doesn't seem to be working in my function below. Any suggestions?
function processInput(text) {
    fs.open('H://log.txt', 'a', 666, function (e, id) {
        fs.write(id, text + "\n", null, 'utf8', function () {
            fs.close(id, function () {
                console.log('file is updated');
            });
        });
    });
}
It looks like you're running this on Windows (given your H://log.txt file path).
Try using \r\n instead of just \n.
Honestly, \n is fine; you're probably viewing the log file in notepad or something else that doesn't render non-Windows newlines. Try opening it in a different viewer/editor (e.g. Wordpad).
Use the os.EOL constant instead.
var os = require("os");

function processInput(text) {
    fs.open('H://log.txt', 'a', 666, function (e, id) {
        fs.write(id, text + os.EOL, null, 'utf8', function () {
            fs.close(id, function () {
                console.log('file is updated');
            });
        });
    });
}
Use the \r\n combination to append a new line in Node.js:
var stream = fs.createWriteStream("udp-stream.log", { 'flags': 'a' });
stream.once('open', function (fd) {
    stream.write(msg + "\r\n");
});
Alternatively, you can use the fs.appendFile method:
let content = 'some text';
content += "\n";
fs.appendFile("helloworld.txt", content, (err) => {
    if (err) return console.log(err);
});
Try:
var fs = require('fs');
const details = require('./common');
var info = JSON.stringify(details);

try {
    fs.writeFileSync('./tmp/hello/f1.txt', `\n${info}`, { flag: 'a' });
    console.log('file is updated');
} catch (err) {
    console.error("error", err);
}

// Steps to execute:
1. Require the needed modules (fs is required here).
2. The ./common file holds a JSON object which I was importing from another file.
3. Import the file and store it in the details variable.
4. Before writing, the JSON data has to be converted into string format (hence JSON.stringify).
5. writeFileSync is a synchronous function; with the 'a' flag it appends to the file.
6. It throws on failure, so errors are handled in the try/catch instead of a callback.
