How to save data in json format? - javascript

I created a program that receives data from four channels.
This is the type of data.
( 0.499147;0.499147;0.499147;0.499147; )
(Separated by ';') Data can be read every interval set up to 100 times.
So I want to save the received data in JSON format.
I wrote the following code but could not save it in JSON format.
port.on('data',function(devicevalue){
arrayvalue = devicevalue.toString();
eachvalue = arrayvalue.split(';');
ch0value = eachvalue[0];
ch1value = eachvalue[1];
ch2value = eachvalue[2];
ch3value = eachvalue[3];
var json_data =
{
index : i ,
ch0value : eachvalue[0],
ch1value : eachvalue[1],
ch2value : eachvalue[2],
ch3value : eachvalue[3],
};
Later if i write this code
jsonfile.writeFile(file,json_data,{flag: 'a', spaces: 2},function(err){
console.error(err)
});
[https://i.stack.imgur.com/DMc5M.png][https://i.stack.imgur.com/DMc5M.png]
If i write this instead of the above
var objjson = JSON.stringify(json_data);
jsonfile.writeFile(file,objjson,{flag: 'a', spaces: 2},function(err){
console.error(err)
});
[ https://i.stack.imgur.com/8ldPG.png][https://i.stack.imgur.com/8ldPG.png]
If I want this form, how do I write the code?
[https://i.stack.imgur.com/nEr3e.png][https://i.stack.imgur.com/nEr3e.png]

How about this:
// Accumulate one object per 'data' event, then write the whole array
// out as a single JSON document once the port closes.
var results = [];
var i = 0;
port.on('data', function(devicevalue) {
  // The payload arrives as "v0;v1;v2;v3;" — split it here so the
  // channel values are actually defined inside this handler (the
  // original referenced an undefined `eachvalue`).
  var eachvalue = devicevalue.toString().split(';');
  var newElement = {
    index: i,
    ch0value: eachvalue[0],
    ch1value: eachvalue[1],
    ch2value: eachvalue[2],
    // There are only four channels; eachvalue[4] is the empty string
    // left over from the trailing ';', so it is not stored.
    ch3value: eachvalue[3]
  };
  results.push(newElement);
  i++;
});
port.on('close', function(code, signal) {
  // Write exactly once, without the append flag: appending a second
  // JSON array to the file would make it invalid JSON.
  jsonfile.writeFile(file, results, { spaces: 2 }, function(err) {
    if (err) console.error(err);
  });
});

Try using toJS().
Example: myObject.toJS(); (note that `var` is a reserved word and cannot be used as a variable name)
You can also try toJSON. Same concept, but toJS returns a deeply copied version of the object.

Related

csvtojson node.js (combine two codes)

How to combine these two pieces of code, so it doesn't just convert CSV to JSON (first code), but also saves the result as a JSON array in an extra file (second code)?
this (first) code converts csv file to json array:
const fs = require("fs");
let fileReadStream = fs.createReadStream("myCsvFile.csv");
let invalidLineCount = 0;
const csvtojson = require("csvtojson");
csvtojson({ "delimiter": ";", "fork": true })
.preFileLine((fileLineString, lineIdx)=> {
let invalidLinePattern = /^['"].*[^"'];/;
if (invalidLinePattern.test(fileLineString)) {
console.log(`Line #${lineIdx + 1} is invalid, skipping:`, fileLineString);
fileLineString = "";
invalidLineCount++;
}
return fileLineString
})
.fromStream(fileReadStream)
.subscribe((dataObj) => {
console.log(dataObj);
// I added the second code hier, but it wirtes the last object of the array (because of the loop?)
}
});
and this (second) code saves the json array to an external file:
fs.writeFile('example.json', JSON.stringify(dataObj, null, 4);
The question is how to put the second piece of code into the first one (i.e. combine them)?
You can use .on('done',(error)=>{ ... }) method. (csvtojson). Push the data into a variable in subscribe method and write the data as JSON in .on('done'). (test was successful).
Check it out:
// Read the CSV, skip malformed lines, collect every parsed row, and
// write the whole collection as a JSON array once parsing finishes.
let fileReadStream = fs.createReadStream("username-password.csv");
let invalidLineCount = 0;
let data = [];
csvtojson({ "delimiter": ";", "fork": true })
  .preFileLine((fileLineString, lineIdx) => {
    // Lines matching this pattern are malformed — blank them out so the
    // parser ignores them, and count how many were skipped.
    let invalidLinePattern = /^['"].*[^"'];/;
    if (invalidLinePattern.test(fileLineString)) {
      console.log(`Line #${lineIdx + 1} is invalid, skipping:`, fileLineString);
      fileLineString = "";
      invalidLineCount++;
    }
    return fileLineString;
  })
  .fromStream(fileReadStream)
  .subscribe((dataObj) => {
    // Collect each parsed row; writing happens once, in 'done'.
    data.push(dataObj);
  })
  .on('done', (error) => {
    // 'done' fires after the last row. Report a parse error instead of
    // silently ignoring it, and only write the file on success.
    if (error) {
      console.error(error);
      return;
    }
    fs.writeFileSync('example.json', JSON.stringify(data, null, 4));
  });
Not sure if you are able to change the library but I would definitely recommend Papaparse for this - https://www.npmjs.com/package/papaparse
Your code would then look something like this:
const fs = require('fs'), papa = require('papaparse');

// Stream the CSV through Papaparse, then persist the parsed rows as
// JSON once parsing is complete.
var readFile = fs.createReadStream(file);
papa.parse(readFile, {
  complete: function (results, file) {
    // Fixed: the original called JSON.stringifiy (typo), which throws
    // a TypeError at runtime.
    fs.writeFile('example.json', JSON.stringify(results.data), function (err) {
      if (err) console.log(err);
      // callback etc
    });
  }
});

Pass array values as parameter to function and create json data

I have a scenario where I am passing an array of objects to a function in nodejs, but the same is failing with undefined error.
Here is what I have tried :
var object = issues.issues //json data
var outarr=[];
for(var key in object){
outarr.push(object[key].key)
}
console.log(outarr) // array is formed like this : ['a','b','c','d','e']
for(var i =0; i<outarr.length;i++){
jira.findIssue(outarr[i]) //here I am trying to pass the array objects into the loop one by one
.then(function(issue) {
var issue_number = issue.key
var ape = issue.fields.customfield_11442[0].value
var description = issue.fields.summary
var ice = issue.fields.customfield_15890[0].value
var vice = issue.fields.customfield_15891.value
var sor = issue.fields.labels
if (sor.indexOf("testcng") > -1) {
var val = 'yes'
} else {
var val = 'yes'
}
var obj = {};
obj['ape_n'] = ape;
obj['description_n'] = description;
obj['ice_n'] = ice;
obj['vice_n'] = vice;
obj['sor_n'] = val;
var out = {}
var key = item;
out[key] = [];
out[key].push(obj);
console.log(out)
} })
.catch(function(err) {
console.error(err);
});
});
What I am trying to achieve: I want to pass the array values one by one as the parameter required by jira.findIssue (basically passing the issue number), fetch the fields for each issue, and produce one combined JSON output.
How can I pass these array values one by one to this function and also run jira.findIssue in a loop?
Any help will be great !! :-)
I have taken a look at the code in your question.
To be honest the code you wrote is messy and contains some simple syntax errors.
A good tip is to use a linter to avoid those mistakes.
More info about linters here: https://www.codereadability.com/what-are-javascript-linters/
To output all results in one array you have to define the array outside the scope of the loop.
I cleaned the code a bit up and use some es6 features. I don't know the context of the code but this is what I can make off it:
// Map every issue to its key.
let outarr = issues.issues.map(elm => elm.key);

// Fire all lookups in parallel and combine the results afterwards.
// (The original logged `output` synchronously, before any findIssue
// promise had resolved, so it always printed an empty array.)
let lookups = outarr.map(el =>
  jira.findIssue(el)
    .then(issue => {
      // One result object per issue.
      let obj = {
        ape_n: issue.fields.customfield_11442[0].value,
        description_n: issue.fields.summary,
        ice_n: issue.fields.customfield_15890[0].value,
        vice_n: issue.fields.customfield_15891.value,
        // NOTE(review): both branches yield "yes" — kept from the
        // original question's code, but one side is probably meant to
        // be "no"; confirm with the author.
        sor_n: issue.fields.labels.indexOf("testcng") > -1 ? "yes" : "yes",
      };
      // Keyed by issue number, matching the asker's desired shape.
      return { [issue.key]: obj };
    })
    .catch(err => {
      console.log(err);
    })
);

// Print the combined output only after every lookup has settled.
Promise.all(lookups).then(output => {
  console.log(output);
});
Some more info about es6 features: https://webapplog.com/es6/

MongoDB Node JS - to delete an entry from an object inside a document object

I'm trying to create a command in Node JS using native mongodb driver, to remove the key value pair from an object which is inside the document object.
I have a mongoDB collection in the following format:
{
"name" : "PrakashPM"
"data" : {
"Jan-2017" : "2,3,1",
"Dec-2016" : "1,2,0",
"Nov-2016" : "9,9,9"
}
},
{
"name" : "Valavan"
"data" : {
"Jan-2017" : "1,1,1",
"Dec-2016" : "3,3,3",
"Nov-2016" : "9,9,9"
}
}
My target is to remove "Dec-2016" : "1,2,0" which is inside "name" :
"PrakashPM"
My Code:
var mongoName = 'PrakashPM';
var mongoDate = "'data'.'Dec-2016'";
// TRIALS
// var mongoDate = "data.'Dec-2016'";
// var mongoDate = "data.Dec-2016";
var mongoVal = "'1,2,0'";
// TRIALS
// var mongoVal = "1,2,0";
mycollection.update( { name: mongoName },
{ $unset: {mongoDate : mongoVal} }
);
NOTE: I'm doing the above operations inside a PUT request function.
I tried many possible ways (TRIALS) for the input values (mongoDate, mongoVal) but I'm not able to achieve the result below.
Also, is it possible to remove the key value pair entry, just by using the key? (i.e. in this case {$unset: {mongoDate}} or something like that)
EXPECTED RESULT:
{
"name" : "PrakashPM"
"data" : {
"Jan-2017" : "2,3,1",
"Nov-2016" : "9,9,9"
}
},
{
"name" : "Valavan"
"data" : {
"Jan-2017" : "1,1,1",
"Dec-2016" : "3,3,3",
"Nov-2016" : "9,9,9"
}
}
Assuming that req.body.timerDate has the month-date string value exactly as in MongoDB, this should work. (See documentation).
You have to use string as key. You cannot use variable names there.
// Assumes req.body.timerDate matches the month-date exactly as stored
// in MongoDB (case-sensitive). $unset needs the full dotted path as the
// key, and the key must be a string, so a computed property name is
// used to build the update document.
const reqName = req.body.name;
const fieldPath = "data." + req.body.timerDate;
mycollection.update({ name: reqName }, { $unset: { [fieldPath]: "" } })
Use the following example as a guide to updating your collection. You need to use the bracket notation to create your query and update documents i.e. you require an update operation which has the structure:
db.mycollection.update(
{ 'name': 'PrakashPM' },
{
'$unset': {
'data.Dec-2016': ''
}
}
)
So, using the variables to construct the objects to use in your operation
// Assemble the query and the $unset update from the variable parts;
// the dotted path selects just the one month inside "data".
var mongoName = 'PrakashPM';
var timerDate = 'Dec-2016';
var query = { name: mongoName };
var update = { '$unset': { ['data.' + timerDate]: '' } };
db.mycollection.update(query, update)
You are trying to use variables as keys in a object.
mycollection.update( { timerName: mongoName },
{ $unset: {mongoDate : mongoVal} }
);
This does not work as you expect and is a general JavaScript concept (not mongodb problem).
You are sending a query to mongo to update a row where the key "timerName" equals the content of the variable "mongoName".
But the proper key is "name".
Try this:
mycollection.update( { name: mongoName },
  // $unset needs the dotted path to the nested key as the key itself;
  // unsetting plain `data` would delete the whole embedded document,
  // not just one month. The value ('') is ignored by $unset.
  { $unset: { 'data.Dec-2016': '' } }
);

how to get left side data of a json dynamically

I have a json which is like
{
"Payload":[{
"PrevYr":"21333",
"CurYr":"123454"
},{
"PrevYr":"234333",
"CurYr":"45546"
},{
"PrevYr":"3563",
"CurYr":"67854"
}]
}
Now in my code i read this json using
$.getJSON("readJson.json", function (data) {}
I need to read the json and create another json with some changes on it. But i can't read the "CurYr" and "PrevYr". I mean not their values. But them. So if there is 2014 and 2015 in place of curyr and prevyr i can get them also. I need to read the left side. Please help..
Try something like this,
// Sample payload: walk every element of Payload and alert each of its
// property names ("PrevYr", "CurYr").
var test = {
  "Payload": [
    { "PrevYr": "21333", "CurYr": "123454" },
    { "PrevYr": "234333", "CurYr": "45546" },
    { "PrevYr": "3563", "CurYr": "67854" }
  ]
};
$.each(test.Payload, function (idx, entry) {
  $.each(entry, function (prop, value) {
    // prop is the "left side" of each JSON pair
    alert(prop);
  });
});
or
// Object.keys() lists an object's own property names; every element of
// Payload has the same shape, so inspecting the first one suffices.
var keySet = Object.keys(test.Payload[0]);
alert(keySet[0]); // first property name
alert(keySet[1]); // second property name
you can use Object.keys( obj );
try
$.getJSON("readJson.json", function (data) {
  // Log the property names of the first Payload element.
  console.log(Object.keys(data.Payload[0]));
}); // fixed: the original snippet was missing this closing ');'
try this:
var obj = {
  "Payload": [
    { "PrevYr": "21333", "CurYr": "123454" },
    { "PrevYr": "234333", "CurYr": "45546" },
    { "PrevYr": "3563", "CurYr": "67854" }
  ]
};

// Walk the top-level keys ("Payload"), then the array indices beneath
// them, printing the property names of every element.
for (var topKey of Object.keys(obj)) {
  // obj[topKey] is an array, so its keys are the indices "0","1","2".
  var indexKeys = Object.keys(obj[topKey]);
  for (var idx of indexKeys) {
    // Property names of one element: ["PrevYr", "CurYr"]
    console.log(Object.keys(obj[topKey][idx]));
  }
  console.log("Inner Keys: ", indexKeys);
}

Convert a text from text file to array with fs [node js]

I have a txt file contains:
{"date":"2013/06/26","statement":"insert","nombre":1}
{"date":"2013/06/26","statement":"insert","nombre":1}
{"date":"2013/06/26","statement":"select","nombre":4}
how I can convert the contents of the text file as array such as:
statement = [
{"date":"2013/06/26","statement":"insert","nombre":1},
{"date":"2013/06/26","statement":"insert","nombre":1},
{"date":"2013/06/26","statement":"select","nombre":4}, ];
I use the fs module node js. Thanks
Sorry
I will explain in more detail:
I have an array :
st = [
{"date":"2013/06/26","statement":"insert","nombre":1},
{"date":"2013/06/26","statement":"insert","nombre":5},
{"date":"2013/06/26","statement":"select","nombre":4},
];
if I use this code :
var arr = new LINQ(st)
.OrderBy(function(x) {return x.nombre;})
.Select(function(x) {return x.statement;})
.ToArray();
I get the result I want.
insert select insert
but the problem my data is in a text file.
any suggestion and thanks again.
There is no reason not to write the file parser yourself. This will work on a file of any size:
var fs = require('fs');
var fileStream = fs.createReadStream('file.txt');
var data = "";
fileStream.on('readable', function() {
//this functions reads chunks of data and emits newLine event when \n is found
data += fileStream.read();
while( data.indexOf('\n') >= 0 ){
fileStream.emit('newLine', data.substring(0,data.indexOf('\n')));
data = data.substring(data.indexOf('\n')+1);
}
});
fileStream.on('end', function() {
//this functions sends to newLine event the last chunk of data and tells it
//that the file has ended
fileStream.emit('newLine', data , true);
});
var statement = [];
fileStream.on('newLine',function(line_of_text, end_of_file){
//this is the code where you handle each line
// line_of_text = string which contains one line
// end_of_file = true if the end of file has been reached
statement.push( JSON.parse(line_of_text) );
if(end_of_file){
console.dir(statement);
//here you have your statement object ready
}
});
If it's a small file, you might get away with something like this:
// Passing the encoding makes readFileSync return a string directly
// (no .toString() needed). Split into lines, parse each line as JSON,
// and drop anything that failed to parse.
var arrayOfThings = fs
  .readFileSync("./file", "utf8")
  .trim()
  .split(/[\r\n]+/g)
  .map(function (line) {
    var parsed = null;
    try {
      parsed = JSON.parse(line);
    } catch (e) {
      // malformed line → stays null and is filtered out below
    }
    return parsed;
  })
  .filter(function (object) {
    return !!object;
  });
If it's larger, you might want to consider reading it in line-by-line using any one of the many modules on npm for consuming lines from a stream.
Wanna see how to do it with streams? Let's see how we do it with streams. This isn't a practical example, but it's fun anyway!
var stream = require("stream"),
fs = require("fs");
// Transform stream that re-chunks arbitrary text input into complete
// lines (one line pushed per chunk, objectMode). The trailing partial
// line is buffered until more input arrives or the stream ends.
class LineReader extends stream.Transform {
  constructor(options) {
    options = options || {};
    options.objectMode = true;
    super(options);
    this._buffer = "";
  }

  _transform(input, encoding, done) {
    if (Buffer.isBuffer(input)) {
      input = input.toString("utf8");
    }
    this._buffer += input;
    // Everything before the last separator is complete lines; the
    // remainder (possibly "") goes back into the buffer.
    const lines = this._buffer.split(/[\r\n]+/);
    this._buffer = lines.pop();
    for (const line of lines) {
      this.push(line);
    }
    return done();
  }

  _flush(done) {
    // Emit the final unterminated line, if any.
    if (this._buffer.length) {
      this.push(this._buffer);
    }
    return done();
  }
}
// Transform stream (objectMode) that JSON-parses each incoming chunk
// and pushes the resulting value; a parse failure errors the stream.
class JSONParser extends stream.Transform {
  constructor(options) {
    options = options || {};
    options.objectMode = true;
    super(options);
  }

  _transform(input, encoding, done) {
    let parsed;
    try {
      parsed = JSON.parse(input);
    } catch (e) {
      return done(e);
    }
    this.push(parsed);
    return done();
  }
}
// Transform stream (objectMode) that buffers every incoming chunk and
// emits them all as a single array when the input ends.
class Collector extends stream.Transform {
  constructor(options) {
    options = options || {};
    options.objectMode = true;
    super(options);
    this._entries = [];
  }

  _transform(input, encoding, done) {
    this._entries.push(input);
    return done();
  }

  _flush(done) {
    this.push(this._entries);
    return done();
  }
}
// Wire the pipeline: raw file bytes → lines → parsed objects → one
// collected array, then print the array once it becomes readable.
var pipeline = fs.createReadStream("./file")
  .pipe(new LineReader())
  .pipe(new JSONParser())
  .pipe(new Collector());
pipeline.on("readable", function () {
  console.log(pipeline.read());
});
fs.readFileSync("myfile.txt").toString().split(/[\r\n]/)
This gives you each line as a string.
You can then use UnderscoreJS or your own for loop to apply the JSON.parse("your json string") method to each element of the array.
var arr = fs.readFileSync('mytxtfile', 'utf-8').split('\n')
I think this is the simplest way of creating an array from your text file

Categories

Resources