I am trying to add up all the numbers from a column into a variable. The problem is that my code concatenates the strings instead of summing them, and my attempt to convert them results in NaN.
var csvData=[];
let test = 0;
var parser = parse({delimiter: ','}, function(err, data){
});
fs.createReadStream(__dirname+'/test2.csv','utf16le').pipe(parser)
.on('data', function(csvrow) {
csvData.push(csvrow);
test = test + (csvrow[2]);
})
.on('end',function() {
console.log(test)
});
This gives me: "0Daily Device Installs00001000101100", and if I use parseInt(csvrow[2]) instead, test ends up as NaN.
My goal is to add up all the numbers under Daily Device Installs. What am I missing?
I did a bit of research on the Node.js CSV package.
Use the header
If your CSV file contains a header row, as suggested in the comment by GrafiCode, like in this example:
"Day","Daily Device Installs"
"2021-09-15",1
"2021-09-16",1
Then CSV Parser has a feature to use the header row with column-names.
See the columns option.
Benefits:
log the header
map the column names (for simple use in code)
use it to make your code clean and expressive
defend against changes of the column order in the input CSV
const fs = require('fs');
const { parse } = require('csv-parse'); // assuming the csv-parse package (v5+ named export; older versions export parse directly)

var csvData = [];
let test = 0;
// options: use the default comma delimiter and map the header
let parser = parse({
  columns: header =>
    header.map(column => {
      console.log(column);
      // could also map differently (e.g. to Snake_Case)
      return column.replace(/ /g, "_");
    })
});
function addToCounter(value) {
  if (isNaN(value)) {
    console.log("WARN: not a number:", value);
    return;
  }
  test += Number(value); // convert the string cell to a number before adding
}
// read from file
fs.createReadStream(__dirname + '/test2.csv', 'utf16le').pipe(parser)
  .on('data', function (csvrow) {
    csvData.push(csvrow);
    addToCounter(csvrow.Daily_Device_Installs); // the column name as mapped with underscore
  })
  .on('end', function () {
    console.log(test);
  });
Note:
I extracted the counter-increment to a function.
Your csvData array now contains an object for each row (with the column names as keys) instead of an array of columns.
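For example, with the sample file above, each row object would look roughly like this (the values arrive as strings):
{ Day: '2021-09-15', Daily_Device_Installs: '1' }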
Try:
if (!isNaN(csvrow[2])) test += +csvrow[2];
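In context, this check goes inside the data handler from the question; here is a minimal sketch reusing the same fs stream and parser setup (the unary + coerces the string cell to a number, and the isNaN test skips the non-numeric header cell):
fs.createReadStream(__dirname + '/test2.csv', 'utf16le').pipe(parser)
  .on('data', function (csvrow) {
    csvData.push(csvrow);
    // only add cells that are numeric; the "Daily Device Installs" header cell is skipped
    if (!isNaN(csvrow[2])) test += +csvrow[2];
  })
  .on('end', function () {
    console.log(test); // now a numeric sum instead of a concatenated string
  });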
Related
I have a CSV file where a value is missing in some rows. If a value is missing, that row needs to be deleted from the CSV file. I am having trouble doing that. Please help me with this.
We can use a CSV parsing library, such as the excellent Papa Parse, to parse the data; then we can filter the rows based on the column we wish to filter on.
For example:
const Papa = require('papaparse');
let csvData = `Col1,Col2,Col3\na1,b1,c1\na2,,c2\na3,b3,c3`;
let { data } = Papa.parse(csvData, { header: true });
console.log("Original csv data:");
console.log(csvData);
function filterEmptyValues(data, column) {
return data.filter(row => row[column]);
}
let filteredData = filterEmptyValues(data, "Col2");
let filteredCsv = Papa.unparse(filteredData);
console.log("\nFiltered csv:")
console.log(filteredCsv);
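Running this should print something like the following (Papa.unparse joins rows with \r\n by default):
Original csv data:
Col1,Col2,Col3
a1,b1,c1
a2,,c2
a3,b3,c3

Filtered csv:
Col1,Col2,Col3
a1,b1,c1
a3,b3,c3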
A .frd file is a type of multi-column numeric data table used for storing information about the frequency response of speakers. A .frd file looks something like this when opened in a text editor:
2210.4492 89.1 -157.7
2216.3086 88.99 -157.7
2222.168 88.88 -157.6
2228.0273 88.77 -157.4
Using javascript, is there a way that I can parse this data in order to return each column separately?
For example, from the .frd file above, I would need to return the values like so:
var column1 = [2210.4492, 2216.3086, 2222.168, 2228.0273];
var column2 = [89.1, 88.99, 88.88, 88.77];
var column3 = [-157.7, -157.7, -157.6, -157.4];
I'm not exactly sure where to begin in trying to achieve this, so any step in the right direction would be helpful!
I found a description of the FRD file format and will follow it.
Let's assume that the content of your .frd file is in the variable called content (the following example is for Node.js):
const fs = require('fs');
const content = fs.readFileSync('./input.frd').toString();
Now if content has your FRD data, it means it's a set of lines, each of which contains exactly three numbers: a frequency (Hz), a level (dB), and a phase (degrees). To split your content into lines, we can just literally split it:
const lines = content.split(/\r?\n/);
(normally, splitting just by '\n' would've worked, but let's explicitly support Windows-style line breaks \r\n just in case. The /\r?\n/ is a regular expression that says "maybe \r, then \n")
To parse each line into three numbers, we can do this:
const values = line.split(/\s+/);
If the file can contain empty lines, it may make sense to double check that the line has exactly three values:
if (values.length !== 3) {
// skip this line
}
Given that we have three values in values, as strings, we can assign the corresponding variables:
const [frequency, level, phase] = values.map(value => Number(value));
(.map converts all the values in values from strings to Number - let's do this to make sure we store the correct type).
Now putting all those pieces together:
const fs = require('fs');
const content = fs.readFileSync('./input.frd').toString();
const frequencies = [];
const levels = [];
const phases = [];
const lines = content.split(/\r?\n/);
for (const line of lines) {
const values = line.split(/\s+/);
if (values.length !== 3) {
continue;
}
const [frequency, level, phase] = values.map(value => Number(value));
frequencies.push(frequency);
levels.push(level);
phases.push(phase);
}
console.log(frequencies);
console.log(levels);
console.log(phases);
The main code (the one that works with content) will also work in browser, not just in Node.js, if you need that.
This code can be written in tons of different ways, but I tried to keep it easy to explain, so I did something very straightforward.
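For example, a rough browser sketch (the '/input.frd' URL is just a placeholder for wherever the file is served from) could fetch the file and run the same parsing loop on the response text:
fetch('/input.frd')                        // hypothetical URL serving the .frd file
  .then(response => response.text())
  .then(content => {
    const frequencies = [], levels = [], phases = [];
    for (const line of content.split(/\r?\n/)) {
      const values = line.split(/\s+/);
      if (values.length !== 3) continue;   // skip empty or malformed lines
      const [frequency, level, phase] = values.map(Number);
      frequencies.push(frequency);
      levels.push(level);
      phases.push(phase);
    }
    console.log(frequencies, levels, phases);
  });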
To use it in Node.js (if your JavaScript file is called index.js):
$ cat input.frd
2210.4492 89.1 -157.7
2216.3086 88.99 -157.7
2222.168 88.88 -157.6
2228.0273 88.77 -157.4
$ node index.js
[ 2210.4492, 2216.3086, 2222.168, 2228.0273 ]
[ 89.1, 88.99, 88.88, 88.77 ]
[ -157.7, -157.7, -157.6, -157.4 ]
I am new to JavaScript and struggling to come up with a solution. I have a file that contains one JSON object per line. In the code below I convert the JSON to objects, trim the whitespace on each line, and return the output.
Now I need to not only remove the whitespace but also search each line of the file for a string (provided by the user and passed in as a variable), and if the string is found, return the entire line.
I tried .includes(req.params.msg) but couldn't get it right.
get(req, res) {
let arry = [];
const text = (fs.readFileSync('./pretty.out'));
arry = (text.toString().split('\n'));
let wat = [];
arry.forEach(i => {
if (!!i.trim()) {
wat.push(JSON.parse(i));
}
});
res.json(wat);
}
File's content will be,
{"foo" : "bar","bar" : "sit"}
{"foo" : "lorem","bar" : "ipsum"}
{"foo" : "dolor","bar" : "amet"}
If the user inputs sit then the output should be,
{"foo" : "bar","bar" : "sit"}
// using fs.readFileSync is fine, but only during process boot time
// if the data is dynamic you'll need to read it async
const fs = require('fs');
const data = fs.readFileSync('./pretty.out', 'utf8')
  .split('\n')
  .filter(line => line.trim())
  .map(line => JSON.parse(line));

// input should use POST requests
post(req, res) {
  const query = req.body.query; // assuming the search string is sent as { "query": "sit" }
  let results = data.filter(d => d.bar === query);
  res.json(results);
}
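As a usage sketch, assuming the handler is mounted on a hypothetical POST /search route with Express's JSON body parser enabled, a client could call it like this:
fetch('/search', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ query: 'sit' })
})
  .then(res => res.json())
  .then(results => console.log(results)); // should log [ { foo: 'bar', bar: 'sit' } ]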
Looks like you've got it in array format now, so this should work fine:
// To return multiple query matches:
function search(query){
return arr.filter(function(item){
return JSON.stringify(item).match(query) !== null;
})
}
// To return single (first) query match:
function search(query){
return arr.find(function(item){
return JSON.stringify(item).match(query) !== null;
})
}
If you're not converting it to an array of JSON objects, you can use:
function search(query){
return file.match(new RegExp("{.+"+query+".+}", "g"))
}
Here's a fiddle:
https://jsfiddle.net/1cqacj3b/11/ (modified to show results in HTML)
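For instance, running the regex variant against the file content from the question (a quick illustration):
const file = `{"foo" : "bar","bar" : "sit"}
{"foo" : "lorem","bar" : "ipsum"}
{"foo" : "dolor","bar" : "amet"}`;

function search(query){
  return file.match(new RegExp("{.+" + query + ".+}", "g"));
}

console.log(search("sit")); // [ '{"foo" : "bar","bar" : "sit"}' ]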
I'm quite new to Node.js. Right now I'm trying to pull data from MongoDB and display it in a table using JavaScript + HTML. However, my table is populated with undefined in all the fields. I think something is wrong with how I'm passing the data through to the JavaScript function, because I am able to render the full results from the people.js file straight to the webpage. Thank you in advance! Below is my code:
Code for my people.js file:
exports.getPeople = (req, res) => {
People.find((err, docs) => {
if (err) { return next(err); }
if (docs != null){
console.log(docs.length)
docs.forEach(function(docs, index) {
console.log(index + " key: " + docs.name)
});
res.render('people', { people: docs });
}
else{
res.render('people', { people: docs() });
}
});
};
My JavaScript + HTML that populates my webpage:
script(type='text/javascript', src='http://code.jquery.com/jquery-1.9.1.js', charset='UTF-8')
script.
  $(document).ready(function(){
    var obj= '$(people)'
    var tbl = "<table>"
    var content="";
    for(i=0; i<obj.length;i++){
      content +=
        '<tr>+<td>' +obj[i]["name"]+
        '</td><td>'+obj[i]["type"]+
        '</td><td>'+obj[i]["min_hours"]+
        '</td><td>'+obj[i]["max_hours"]+
        '</td><td>'+obj[i]["email"]+
        '</td><td>'+obj[i]["phone_number"]+
        '</td><td>'+ '<input type="button" value = "Update" onClick="Javacsript:deleteRow(this)">' +
        '</td><td>'+'<input type="button" value = "Delete" onClick="Javacsript:deleteRow(this)">';
        '</td></tr>';
    }
    content += "</table>"
    $('#myTableData').append(content);
  });
As you mentioned, you can render the array results from the people.js file directly into the webpage, so you don't have to read the data through a JavaScript function using jQuery. The template engine is built on top of JavaScript and supports plenty of methods and features to do what you're trying to achieve here. For example, you can use an iteration construct like each ... in to build your table (see docs - Iteration):
// ...
body
  table(id="myTableData")
    // for each person in the people array (from people.js) ...
    each person in people
      // build a new table row
      tr
        // insert table data
        td #{person.name}
        td #{person.type}
        td #{person.min_hours}
        td #{person.max_hours}
        td #{person.email}
        td #{person.phone_number}
        // add the update and delete buttons
        td
          input(type="button" value = "Update" onclick=" ... ")
          input(type="button" value = "Delete" onclick=" ... ")
      // move to next person in the people array ...
The Problem
var obj = '$(people)' does not work as you may expect. You want obj to hold the people array from the people.js file so that you can loop over each object in the array, but this is not what's happening. obj is actually a string value of length 9, so the for loop evaluates 9 string values (not objects). This is why all of your fields are undefined.
To see what I mean, run this code snippet:
var obj = '$(people)';
for (var i = 0; i < obj.length; i++){
console.log(obj[i]);
console.log(obj[i]["name"]);
}
The reason $(people) does not evaluate to an object is mainly that the parent element, script., causes everything below it to be treated as plain text. The . after the tag tells the template engine to render plain text (see docs: Block in a Tag).
If you wanted to assign people to obj in your inline script you may try it this way:
script
  | var obj = #{people};
But this will cause an Unexpected identifier JavaScript error because of the _id field on each item in people. By default, _id is an ObjectID hex value from MongoDB, so you would have to either remove the _id field from the docs or add quotes to each doc._id so it evaluates to a string. This would all have to be done in people.js before you return the data.
To see what I mean about the Unexpected identifier error, run this code snippet:
// works
var obj = { _id: '583ab33cdaf857b543c76afe',
name: 'john'};
// Error: Unexpected identifier
var obj = { _id: 583ab33cdaf857b543c76afe,
name: 'john'};
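A minimal sketch of that suggestion, assuming Mongoose documents (so toObject() is available), could look like this in people.js before rendering:
People.find((err, docs) => {
  if (err) { return next(err); }
  const plain = docs.map(doc => {
    const obj = doc.toObject();       // plain object instead of a Mongoose document
    obj._id = obj._id.toString();     // a quoted string, so the inline script can parse it
    return obj;
  });
  res.render('people', { people: plain });
});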
I'm querying some MDB files in nodejs on linux using MDBTools, unixodbc and the node odbc package.
Using this code
db.query("select my_str_col, my_dbl_col from my_table", function (err, rows) {
if (err) return console.log(err);
console.log(rows);
db.close();
});
I can query the my_str_col string column, but I can't decipher the my_dbl_col Double column; I get something like this:
[ { my_str_col: 'bla', my_dbl_col: '{\u0014�Gai�#' },
{ my_str_col: 'bla bla', my_dbl_col: '' },
{ my_str_col: 'bla', my_dbl_col: '�G�z\u0014NF#' } ]
All non-empty strings are 7 or 8 bytes, but what bothers me most is the second row of this example, where I get an empty string while I know there is a non-null number in the MDB: it means I can't even try to rebuild the numbers from the string bytes.
So, how can I read numbers of type Double from an MDB file in Node on Linux?
Note that:
a tool like MDBViewer (using MDBTools) correctly reads those numbers
JavaScript numbers will be precise enough for me: those numbers would all fit in float32
I can't apply lengthy conversions to the MDB files: I must make fast queries on a few hundred frequently changed files...
a solution in which I can't really issue queries but which lets me read the whole table would be acceptable too
As I couldn't get node-odbc to correctly decipher numbers, I wrote a function calling mdb-export (which is very fast) and reading the whole table.
var fs = require("fs"),
    spawn = require('child_process').spawn,
    byline = require('byline'); // npm install byline

// Streaming read of chosen columns from a table in an MDB file.
// parameters:
//   args:
//     path    : complete path of the MDB file
//     table   : name of the table
//     columns : names of the desired columns
//   read : a callback accepting a row (an array of strings)
//   done : an optional callback called when everything is finished, with an error code or 0 as argument
function queryMdbFile(args, read, done) {
  var cmd = spawn('/usr/bin/mdb-export', [args.path, args.table]);
  var rowIndex = 0, colIndexes;
  byline(cmd.stdout).on('data', function (line) {
    var cells = line.toString().split(',');
    if (!rowIndex++) { // first line: find the column indexes
      var lc = function (s) { return s.toLowerCase() };
      colIndexes = args.columns.map(lc).map(function (name) {
        return cells.map(lc).indexOf(name);
      });
    } else { // other lines: give the callback the required cells
      read(colIndexes.map(function (index) { return ~index ? cells[index] : null }));
    }
  });
  cmd.on('exit', function (code) {
    if (done) done(code);
  });
}
Here's an example in which I build an array with all rows of the question's example:
var rows = [];
queryMdbFile({
  path: "mydatabase.MDB",
  table: 'my_table',
  columns: ['my_str_col', 'my_dbl_col']
}, function (row) {
  rows.push(row);
}, function (errorCode) {
  console.log(errorCode ? ('error:' + errorCode) : 'done');
});
Everything is read as strings but easy to parse:
[ ['bla', '1324' ],
['bla bla', '332e+5'],
['bla', '43138' ] ]
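For instance, converting the second column to actual numbers can be done with parseFloat (a small sketch over the rows array built above):
var parsedRows = rows.map(function (row) {
  return [row[0], parseFloat(row[1])]; // e.g. '332e+5' becomes 33200000
});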
Surprisingly enough, this is faster than querying using node-odbc and unixodbc.