I have a problem in Node.js about file handling - javascript

I'm not able to solve the following file-handling problem in Node.js: I have a file emp.txt that contains employee data in fixed-size records in the following format:
EmpID:Name:Dept:Salary
1001:Harry:Sales:23000
1002:Sarita:Accounts:20000
1003:Monika:TechSupport:35000
Read the file and display the sum of the salaries of all employees.
I have tried the following code, which reads the file successfully, but I can't work out the logic to solve the exact problem. My code to read the file:
var fs = require("fs");

console.log("Going to read file!");
fs.readFile('emp.txt', function (err, data) {
    if (err) {
        return console.error(err);
    }
    console.log(data.toString().split(":"));
    console.log("read Successfully");
});
What is the correct logic to read the Salary field from emp.txt and calculate its sum?

First you have to split your text file on newlines (\n). Then loop through each row and accumulate the total:
var fs = require("fs");

console.log("Going to read file!");
let totalSalary = 0;
fs.readFile('emp.txt', function (err, data) {
    if (err) {
        return console.error(err);
    }
    const dataRows = data.toString().split("\n");
    for (let index = 0; index < dataRows.length; index++) {
        // Skip the header row (EmpID:Name:Dept:Salary)
        if (index > 0) {
            let empData = dataRows[index].split(":");
            let salary = parseInt(empData[3]);
            // Guard against a trailing empty line, which would parse to NaN
            if (!isNaN(salary)) {
                totalSalary += salary;
            }
        }
    }
    console.log(totalSalary);
    console.log("read Successfully");
});
Repl.it link: https://repl.it/#h4shonline/ImpressionableRadiantLogic

Why don't you:
read the file line by line (https://nodejs.org/api/readline.html#readline_example_read_file_stream_line_by_line),
remove the spaces,
split the line by ":",
get the last element,
convert it with Number(),
check that it is a number,
add it to the sum.
Something like this:
const fs = require('fs');
const readline = require('readline');

async function processLineByLine() {
    const fileStream = fs.createReadStream('emp.txt');

    const rl = readline.createInterface({
        input: fileStream,
        crlfDelay: Infinity
    });
    // Note: we use the crlfDelay option to recognize all instances of CR LF
    // ('\r\n') in emp.txt as a single line break.

    let sum = 0;
    for await (let line of rl) {
        // Each line in emp.txt will be successively available here as `line`.
        line = line.replace(/ /g, '').split(':');
        const salary = Number(line.pop());
        if (!isNaN(salary)) {
            sum += salary;
        }
    }
    console.log(`Sum: ${sum}`);
}

processLineByLine();
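
For the sample emp.txt shown in the question, both answers should report a total salary of 78000 (23000 + 20000 + 35000).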

Related

How to check whether a line of one file is present in another file in JavaScript?

I am new to JavaScript. I want to check whether a line from one file is present in another file. I have the code below:
async getReadLiner(filename) {
    const fileStream = fs.createReadStream(filename);
    const rl = readline.createInterface({
        input: fileStream,
    });
    return rl;
}
Is it possible to do something like
async checkLines() {
    const file1 = await this.getReadLiner(filename1);
    const file2 = await this.getReadLiner(filename2);

    for await (const line of file1) {
        for await (const file2Line of file2) {
            if (file2Line == line) // It's not executing this line at all.
                return true;
        }
    }
    return false;
}
Any idea why this line (if (file2Line == line)) is not executing?
Thanks in advance.
"Is it possible to do something like"
Yes, there's an example of that in the documentation of the readline module, which looks like this:
const fs = require('fs');
const readline = require('readline');

async function processLineByLine() {
    const fileStream = fs.createReadStream('input.txt');

    const rl = readline.createInterface({
        input: fileStream,
        crlfDelay: Infinity
    });
    // Note: we use the crlfDelay option to recognize all instances of CR LF
    // ('\r\n') in input.txt as a single line break.

    for await (const line of rl) {
        // Each line in input.txt will be successively available here as `line`.
        console.log(`Line from file: ${line}`);
    }
}

processLineByLine();
Adapting the loop in that example:
const targetLine = "helo 123";
for await (const line of getReadLiner(/*...*/)) {
    if (line === targetLine) { // Or line.includes(targetLine) for a substring match
        // Found it
        break;
    }
}
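
As for why the comparison line never runs: a readline interface wraps a stream and can only be iterated once, so the inner for await exhausts file2 during the first outer iteration, and every later pass sees it as empty. A minimal sketch of one way around this (hasCommonLine is a hypothetical helper): collect the second file's lines into a Set, then stream the first file against it:

const fs = require('fs');
const readline = require('readline');

async function hasCommonLine(filename1, filename2) {
    // Read the second file once, collecting its lines into a Set.
    const file2Lines = new Set();
    const file2 = readline.createInterface({
        input: fs.createReadStream(filename2),
        crlfDelay: Infinity
    });
    for await (const line of file2) {
        file2Lines.add(line);
    }

    // Stream the first file and stop at the first line present in the Set.
    const file1 = readline.createInterface({
        input: fs.createReadStream(filename1),
        crlfDelay: Infinity
    });
    for await (const line of file1) {
        if (file2Lines.has(line)) return true;
    }
    return false;
}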

How can I overwrite and append data to the same file multiple times in Node.js?

I have a "clients.txt" file where I have a list of emails. I try to run a program for sending emails where I chose a number of emails to use from the file, in that case the number is 2. After I use the two emails I want to overwrite "clients.txt" without them. The problem is when I try to run the code just for one single time every thing is working! but if I make a loop something is wrong. Looking forward to see any help from you guys. Thanks! I add the code bellow. PS: Sorry for my bad english!
function readEmails() {
    const fs = require('fs');
    clients_list = fs.readFileSync('clients.txt', 'utf8').split('\n');
    let filtered = clients_list.filter(function (el) {
        return el != null && el != '';
    });
    return filtered;
}

function dump_array(arr, file) {
    let fs = require('fs');
    let file = fs.createWriteStream(file);
    file.on('error', function (err) { /* error handling */ });
    arr.forEach(function (v) { file.write(v + '\n'); });
    file.end();
}

while_var = 0;
while (while_var < 2) {
    while_var++;
    let all_clients = readEmails();
    let selected_clients = [];
    if (all_clients.length > 0) {
        selected_clients = all_clients.splice(0, 2);
        dump_array(all_clients, 'clients.txt');
        console.log(selected_clients);
    } else {
        console.log('No more clients');
    }
}
const fs = require('fs');

function readEmails() {
    const clients_list = fs.readFileSync('clients.txt', 'utf8').split('\n');
    const filtered = clients_list
        // clear false, 0 and undefined too
        .filter(el => !!el)
        // remove extra spaces and \r symbols
        .map(el => el.trim());
    return filtered;
}

function dump_array(arr, file) {
    // Here you need the sync method: the next loop iteration re-reads the
    // file immediately, so the write has to finish before that happens.
    fs.writeFileSync(file, arr.join('\n'));
    // And here was the 'already declared' error in the original code
    // (the parameter `file` was redeclared as a write stream).
}

let while_var = 0;
while (while_var++ < 2) {
    let all_clients = readEmails();
    let selected_clients = [];
    if (all_clients.length > 0) {
        selected_clients = all_clients.splice(0, 2);
        dump_array(all_clients, 'clients.txt');
        console.log(selected_clients);
    } else {
        console.log('No more clients');
    }
}

Node.js: remove specific columns from CSV file

I have a CSV file that can contain around a million records. How can I remove the columns whose names start with _ and generate a resulting CSV?
For the sake of simplicity, consider the CSV below:
Sr.No  Col1  Col2  _Col3   Col4  _Col5
1      txt   png   676766  win   8787
2      jpg   pdf   565657  lin   8787
3      pdf   jpg   786786  lin   9898
I would want the output to be:
Sr.No  Col1  Col2  Col4
1      txt   png   win
2      jpg   pdf   lin
3      pdf   jpg   lin
Do I need to read the entire file to achieve this, or is there a better approach?
const csv = require('csv-parser');
const fs = require('fs');

fs.createReadStream('data.csv')
    .pipe(csv())
    .on('data', (row) => {
        // generate a new csv, removing the specific columns
    })
    .on('end', () => {
        console.log('CSV file successfully processed');
    });
Any help on how I can achieve this would be appreciated.
Thanks.
To anyone who stumbles on this post:
I was able to transform the CSVs using the code below, with the fs and csv modules.
await fs.createReadStream(m.path)
    .pipe(csv.parse({ delimiter: '\t', columns: true }))
    .pipe(csv.transform((input) => {
        delete input['_Col3'];
        console.log(input);
        return input;
    }))
    .pipe(csv.stringify({ header: true }))
    .pipe(fs.createWriteStream(transformedPath))
    .on('finish', () => {
        console.log('finish....');
    }).on('error', () => {
        console.log('error.....');
    });
Source: https://gist.github.com/donmccurdy/6cbcd8cee74301f92b4400b376efda1d
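The question asks to drop every column starting with _, not just _Col3, so the transform step above could be generalized. A sketch under the same fs/csv-module assumptions (the file names here are placeholders):

const csv = require('csv');
const fs = require('fs');

fs.createReadStream('input.csv')
    .pipe(csv.parse({ delimiter: '\t', columns: true }))
    .pipe(csv.transform((input) => {
        // drop every column whose header starts with '_'
        for (const key of Object.keys(input)) {
            if (key.startsWith('_')) delete input[key];
        }
        return input;
    }))
    .pipe(csv.stringify({ header: true }))
    .pipe(fs.createWriteStream('output.csv'));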
Actually you can handle that by using two npm packages:
https://www.npmjs.com/package/csvtojson
to convert your CSV to JSON format, and
https://www.npmjs.com/package/json2csv
to convert it back. With the second library, if you know the exact fields you want, you can pass parameters to specifically select those fields.
const { Parser } = require('json2csv');

const fields = ['field1', 'field2', 'field3'];
const opts = { fields };

try {
    const parser = new Parser(opts);
    const csv = parser.parse(myData);
    console.log(csv);
} catch (err) {
    console.error(err);
}
Or you can modify the JSON objects manually to drop those columns.
Try this with the csv lib:
const csv = require('csv');
const fs = require('fs');

const csvString = `col1,col2
value1,value2`;

csv.parse(csvString, { columns: true })
    .pipe(csv.transform(({ col1, col2 }) => ({ col1 }))) // remove col2
    .pipe(csv.stringify({ header: true }))
    .pipe(fs.createWriteStream('./file.csv'));
With this function I accomplished the column removal from a CSV:
function removeCol(csv, col) {
    let lines = csv.split("\n");
    let headers = lines[0].split(",");
    let colNameToRemove = headers.find(h => h.trim() === col);
    let index = headers.indexOf(colNameToRemove);
    if (index === -1) return csv; // column not found, nothing to remove
    let newLines = [];
    lines.forEach((line) => {
        let fields = line.split(",");
        fields.splice(index, 1);
        newLines.push(fields);
    });
    let arrData = '';
    for (let index = 0; index < newLines.length; index++) {
        const element = newLines[index];
        arrData += element.join(',') + '\n';
    }
    return arrData;
}
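
A minimal usage sketch for that function (the file names are assumptions):

const fs = require('fs');

const input = fs.readFileSync('data.csv', 'utf8');
const output = removeCol(input, '_Col3');
fs.writeFileSync('data_out.csv', output);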

Read strings line by line from Buffer instance in Node.js module

I have a Node.js module which exports two functions: init(data), where data is a Buffer, and test(word), where word is a string.
I would like to read lines from the data Buffer instance, line by line, inside the test() function.
I have no experience in Node.js, only JS. All I have learned from this site so far is how to export multiple functions from a Node.js module.
Here is the function declaration so far:
module.exports = {
    init: function (data) {
    },
    test: function (word) {
    }
}
According to your comment, data is an instance of Buffer and contains a dictionary with one English word per line. So you can convert data to an array of strings by splitting on newline characters. In module form:
module.exports.init = function (data) {
    if (!(data instanceof Buffer)) {
        throw new Error('not a instanceof Buffer');
    }
    this.currentData = data.toString().split(/(?:\r\n|\r|\n)/g);
};

module.exports.test = function (word) {
    // for example
    var yourTestMethod = function (lineNumber, lineContent, testWord) {
        return true;
    };

    if (this.currentData && this.currentData.length) {
        for (var line = 0; line < this.currentData.length; line++) {
            if (yourTestMethod(line, this.currentData[line], word)) {
                return true;
            }
        }
    }
    return false;
};
If you save this code as testModule.js, you can use the module from your main code like this:
// load module
var testModule = require('./testModule.js');

// init
var buf = new Buffer(/* load dictionary */);
testModule.init(buf);

// test
console.log(testModule.test('foo'));
I think this is simpler. Thanks.
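
One side note on the snippets above: new Buffer(...) has since been deprecated in Node.js; the modern equivalent is Buffer.from:

// modern replacement for `new Buffer(baseText)`
var buf = Buffer.from(baseText);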
(old answer)
I think you can use the readline module. But readline accepts a stream, not a buffer, so the buffer needs converting. For example:
var readline = require('readline');
var stream = require('stream');

// string to buffer
var baseText = 'this is a sample text\n(empty lines ...)\n\n\n\nend line:)';
var buf = new Buffer(baseText);

// http://stackoverflow.com/questions/16038705/how-to-wrap-a-buffer-as-a-stream2-readable-stream
var bufferStream = new stream.PassThrough();
bufferStream.end(buf);

var rl = readline.createInterface({
    input: bufferStream,
});

var count = 0;
rl.on('line', function (line) {
    console.log('this is ' + (++count) + ' line, content = ' + line);
});
Then the output is:
> node test.js
this is 1 line, content = this is a sample text
this is 2 line, content = (empty lines ...)
this is 3 line, content =
this is 4 line, content =
this is 5 line, content =
this is 6 line, content = end line:)
How is that?

Overwrite a line in a file using node.js

What's the best way to overwrite a line in a large (2MB+) text file using node.js?
My current method involves:
copying the entire file into a buffer,
splitting the buffer into an array by the newline character (\n),
overwriting the line by using its index,
then overwriting the file with the array after joining with \n.
First, you need to find where the line starts and where it ends. Next you need a function for replacing the line. I have a solution for the first part using one of my libraries: Node-BufferedReader.
var lineToReplace = "your_line_to_replace";
var startLineOffset = 0;
var endLineOffset = 0;

new BufferedReader("your_file", { encoding: "utf8" })
    .on("error", function (error) {
        console.log(error);
    })
    .on("line", function (line, byteOffset) {
        startLineOffset = endLineOffset;
        endLineOffset = byteOffset - 1; // byteOffset is the offset of the NEXT byte; -1 if it's the end of the file, in which case endLineOffset = <the file size>
        if (line === lineToReplace) {
            console.log("start: " + startLineOffset + ", end: " + endLineOffset +
                ", length: " + (endLineOffset - startLineOffset));
            this.interrupt(); // interrupts the reading and finishes
        }
    })
    .read();
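
As for the second part, which that answer leaves open: with the offsets known, one hedged sketch is to patch the file in place via fs.open / fs.write (overwriteLine is a hypothetical helper, not part of BufferedReader). This only works cleanly when the replacement has the same byte length as the original line; otherwise the rest of the file has to be rewritten:

var fs = require('fs');

// Overwrite a line in place. Assumes newLine has exactly the same byte
// length as the line it replaces; otherwise neighbouring bytes get clobbered.
function overwriteLine(file, startLineOffset, newLine, callback) {
    fs.open(file, 'r+', function (err, fd) {
        if (err) return callback(err);
        var buf = Buffer.from(newLine, 'utf8');
        fs.write(fd, buf, 0, buf.length, startLineOffset, function (writeErr) {
            fs.close(fd, function () { callback(writeErr); });
        });
    });
}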
Maybe you can try the package replace-in-file.
Suppose we have a txt file as below:
// file.txt
"line1"
"line2"
"line5"
"line6"
"line1"
"line2"
"line5"
"line6"
and we want to replace:
line1 -> line3
line2 -> line4
Then, we can do it like this:
const replace = require('replace-in-file');

const options = {
    files: "./file.txt",
    from: [/line1/g, /line2/g],
    to: ["line3", "line4"]
};

replace(options)
    .then(result => {
        console.log("Replacement results: ", result);
    })
    .catch(error => {
        console.log(error);
    });
The result is as below:
// file.txt
"line3"
"line4"
"line5"
"line6"
"line3"
"line4"
"line5"
"line6"
For more details, please refer to its docs: https://www.npmjs.com/package/replace-in-file
This isn't a solution focused on file size, but it overwrites a line in a file using Node.js. It may help other people whom search engines redirect to this post, like me.
import * as fs from 'fs'

const filename = process.argv[2]
const lineIndexToUpdate = parseInt(process.argv[3]) - 1
const textUpdate = process.argv[4]

function filterLine(indexToUpdate, dataString) {
    return dataString
        .split('\n')
        .map((val, index) => {
            if (index === indexToUpdate)
                return textUpdate
            else
                return val
        })
        .join('\n')
}

fs.readFile(filename, 'utf8', (err, data) => {
    if (err) throw err
    fs.writeFile(filename, filterLine(lineIndexToUpdate, data), (err) => {
        if (err) throw err
        console.log("Line updated")
    })
})
Script usage example:
node update_line.js file 10 "te voglio benne"
