Saving and deleting lines to & from file in node.js - javascript

I have written code for a chatroom, I am trying to implement a ban list which bans by username.
In the file I want it to look something like this (without the blank lines between each other)..
UserToBan1 Banned-By Reason-For-Ban
UserToBan2 Banned-By Reason-For-Ban
UserToBan3 Banned-By Reason-For-Ban
I want to be able to check if the person is listed in that file by username.
Want to be able to remove the line from the list (unban) and to be able to add someone to the file.
I am new to node.js and javascript but I don't know what would be the best way to do this. I have created a banlist.json file which I know how to open and close but adding, removing lines and checking the first variable is where I am stuck.
EDIT:
This the code I am now working with but seems to produce a null value when I console.log(data) or console.log(content).
s.on('connection', function(ws) {
ws.on('message', function(message){
// START only on connection
message = JSON.parse(message);
if(message.type == "name"){
// start check for double login
var ConnectingUser = message.data;
var found = 0;
s.clients.forEach(function e(client) {
var ConnectedUser = client.personName;
if(ConnectedUser == ConnectingUser) {
client.send(JSON.stringify(
{
name: "Server",
data: "***We do not allow double logins!"
}
));
client.send(JSON.stringify(
{
name: "Server",
data: "🔴 Disconnected..."
}
));
client.close();
}
});
// end check for double login
console.log("Client <"+message.data+"> Connected");
memberJoinedChatMsg(ws, message);
ws.personName = message.data;
return;
}
// -- test stuff start ------------------------------------------------------------
var file = './banlist/banned.json';
fs = require('fs');
fs.readFile(file, function(content) {
var data = JSON.parse(content);
console.log(Object.keys(data));
// Delete line
delete data["UserToBan1"]
console.log(Object.keys(data));
// Convert JSON object to string
var transformed_content = JSON.dumps(data);
// write file here
fs.writeFile(file, transformed_content, function(err) {
if (err) {
console.log("Error writing file: " + (err.stack || err))
} else {
console.log("Saved file")
}
})
});
// -- test stuff end ------------------------------------------------------------

If you know how to read/write a file, you can directly use store the data as JSON in that file, e.g.:
{
"UserToBan1": {
"bannedby": "user who banned UserToBan1",
"reason": "reason for the ban"
},
"UserToBan2": {
"bannedby": "user who banned UserToBan2",
"reason": "reason for the ban"
},
"UserToBan3": {
"bannedby": "user who banned UserToBan3",
"reason": "reason for the ban"
}
}
When reading the file, parse the file content as JSON:
fs = require('fs');
var file = "/path/to/json/file";
fs.readFile(file, function(err, content) {
if (err) {
console.log("Error reading file: " + (err.stack || err))
} else {
var data = JSON.parse(content);
console.log(Object.keys(data));
// Delete line (see: https://stackoverflow.com/questions/3455405/how-do-i-remove-a-key-from-a-javascript-object/28797751)
// example to delete user from list
delete data["UserToBan1"]
// example to add user to list
data["UserToBan4"] = {
"bannedby": "user who banned UserToBan4",
"reason": "reason for banning UserToBan4"
}
console.log(Object.keys(data));
// Convert JSON object to string
var transformed_content = JSON.stringify(data, null, 4);
// write file here
fs.writeFile(file, transformed_content, function(err) {
if (err) {
console.log("Error writing file: " + (err.stack || err))
} else {
console.log("Saved file")
}
})
}
});

Related

Javascript: parsing emails into json/building json file from text

I have a self-made email parser that pulls email body's that match a specific subject and writes each email to individual text files in a folder, using fs:
module.exports = {
run: function(){
var Imap = require('imap'),
inspect = require('util').inspect;
var fs = require('fs'), fileStream;
var logger = require('./logger.js');
var buffer = '';
var imap = new Imap({
user: "tracker#email.com",
password: "xxxxxxxx",
host: "host.secureserver.net",
port: 993,
tls: true,
connTimeout: 10000,
authTimeout: 5000,
debug: console.log,
tlsOptions: { rejectUnauthorized: true },
mailbox: "INBOX",
searchFilter: ["UNSEEN", "FLAGGED"], // the search filter being used after an IDLE notification has been retrieved
markSeen: false, // all fetched email willbe marked as seen and not fetched next time
fetchUnreadOnStart: true, // use it only if you want to get all unread email on lib start. Default is `false`,
mailParserOptions: { streamAttachments: false }, // options to be passed to mailParser lib.
attachments: false, // download attachments as they are encountered to the project directory
attachmentOptions: { directory: "attachments/" }
});
function openInbox(cb){
imap.openBox('Inbox', false, cb);
}
imap.once('ready', function(){
logger.printWriteLine('Parsing inbox for new error alerts...', 1);
openInbox(function (err, box){
if(err) throw err;
imap.search(
['UNSEEN', ['SUBJECT', 'Error Alerts']],
function(err, results){
if(err) throw err;
else if(!results || !results.length){
logger.printWriteLine('No new emails', 2);
}
else{
var f = imap.fetch(results, {bodies: '1', markSeen: true});
f.on('message', function(msg, seqno){
logger.printWriteLine('message #:'+seqno, 1);
logger.printWriteLine('message type: '+msg.txt, 1);
var prefix = '(#' + seqno + ') ';
msg.on('body', function (stream, info){
stream.on('data', function(chunk){
buffer += chunk.toString('utf8');
//console.log('Buffer: '+buffer);
})
stream.once('end', function(){
if(info.which === '1'){
//console.log('Buffer 2: ' + buffer);
}
});
stream.pipe(fs.createWriteStream('./mailParser/'+ seqno + '-body.txt'));
});
msg.once('end', function () {
logger.printWriteLine(prefix + ' - End of message.', 1);
});
});
f.once('error', function (err) {
console.log('Fetch error: ' + err);
});
f.once('end', function () {
logger.printWriteLine('Done fetching messages.', 1);
imap.end();
});
}
});
});
});
imap.once('error', function (err) {
console.log(err);
});
imap.once('end', function () {
console.log('Connection ended');
});
imap.connect();
}
}
The resulting text files are each formatted like this:
Vehicle ID720
DRIDT0MA12200330
Event TypeMediaError - SD1,Protected
Event Local Time2022-09-20 16:54:18
Event Time GMT2022-09-20 20:54:18
URLView Event
More DataSD1,Protected
Vehicle Registration
Vehicle VIN
Vehicle Manufacturer
Vehicle Model
Vehicle FuelColorTransmissionPolicy HolderPolicy NumberCustom 1Custom 2
My goal is to parse/pull relevant data from these emails, and build the details into a json file. Right now I'm stuck trying to loop through each text file, parse/pull the relevant details with regex/"simple-text-parser", and build into a json file to be consumed by other parts of my program.
goal output would be something like this:
{
"vehicle1": {
"number": "720",
"DRID": "T0MA12200330",
"Type": "Media Error - SD1,Protected"
"Time": "2022-09-20 16:54:18"
},
"vehicle2: {
...
}
}
Obviously this is an inefficient, round-about way to accomplish this (with the text files). I'm wondering if anybody can suggest a better way to parse the emails from my mail parser, so perhaps I can go straight to JSON, or any alternate methods one might use to accomplish what I'm trying to do.
Please understand I'm not a professional dev so my code may be pretty rough.

forEach not working as expected in NodeJs

I am uploading the excel sheet in DB with the help of Nodejs, I am unable to authenticate and return the error as already exists the userid when the item.USER_ID already exists in DB. my server goes crashes and returns an error as Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
Please help in the code how I fix this issue and make it, If the item.USER_ID already exists return error else insert.
var XLSX = require("xlsx");
const fs = require("fs");
try {
const transaction = await con.transaction();
var workbook = XLSX.readFile("myfile.xlsx");
let json_data = XLSX.utils.sheet_to_json(workbook.Sheets.Sheet1);
let count = 0;
json_data.map(async (item) => {
let stmt1 = await con.query("SELECT * FROM `table` WHERE `user_id` = :userid", { replacements: { userid: item.USER_ID }, type: con.QueryTypes.SELECT });
if (stmt1.length > 0) {
await transaction.rollback();
return res.json({ message: "already exist the userid" });
} else {
let stmt2 = await con.query("INSERT INTO `table` (`user_id` , `user_name`) VALUES ( :user_id , :user_name)", {
replacements: {
user_id: item.USER_ID,
user_name: item.USER_NAME,
},
type: con.QueryTypes.INSERT,
transaction: transaction,
});
count++;
if (count == json_data.length) {
await transaction.commit();
return res.json({ message: "file uploaded successfully.." });
}
}
});
} catch (err) {
await transaction.rollback();
return res.json({ code: 500, message: { msg: "SQL ERROR" }, error: err.stack, status: "error" });
}
Here in your code, you are calling the res.json({ message: "file uploaded successfully.." }) inside json_data.map function.
since you are calling the res.json function inside an array, it'll be called as many times as of elements present in the array and as we know, we can sent only 1 response at a time for a request.
Because of which you're catching the errors Cannot set headers after they are sent to the client
just remove that res.json inside the map function, add it at the last of that particular map function.
I know you might question for the condition count == json_data.length you added to the code but javascript is async and this particular block can be executed before to that.
Hope this answer helps you! Please comment if you get any errors or have questions.

SQLITE_MISUSE: bad parameter or other API misuse [duplicate]

I've searched on how to create a sqlite3 database with a callback in Node.js and have not been able to find any links. Can someone point me towards documentation or provide a 2-3 line code sample to achieve the following:
Create a sqlite3 database and catch an error if the creation fails for any reason.
Here is what I've tried:
let dbCreate = new sqlite3.Database("./user1.db", sqlite3.OPEN_CREATE, function(err){
if(!err){
logger.infoLog("Successfully created DB file: " + dbFileForUser + " for user: " + username );
} else {
logger.infoLog("Failed to create DB file: " + dbFileForUser + ". Error: " + err );
}
});
dbHandler[username] = dbCreate;
When I execute this, I get the following error:
"Failed to create DB file: ./database/user1.db. Error: Error: SQLITE_MISUSE: bad parameter or other API misuse"
This call without callback works just fine.
var customDB = new sqlite3.Database("./custom.db", sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE);
But in this, I will not know if I run into any errors while creating the Database.
Try this:
let userDB = new sqlite3.Database("./user1.db",
sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE,
(err) => {
// do your thing
});
Example.
#Irvin is correct, we can have a look at http://www.sqlitetutorial.net/sqlite-nodejs/connect/ and
check it says if you skip the 2nd parameter, it takes default value as sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE
and in this case if database does not exist new database will be created with connection.
sqlite3.OPEN_READWRITE: It is to open database connection and perform read and write operation.
sqlite3.OPEN_CREATE : It is to create database (if it does not exist) and open connection.
So here is the first way where you have to skip the 2nd parameter and close the problem without an extra effort.
const sqlite3 = require("sqlite3").verbose();
let db = new sqlite3.Database('./user1.db', (err) => {
if (err) {
console.error(err.message);
} else {
console.log('Connected to the chinook database.|');
}
});
db.close((err) => {
if (err) {
return console.error(err.message);
}
console.log('Close the database connection.');
});
And this is the 2nd way to connect with database (already answered by #Irvin).
const sqlite3 = require("sqlite3").verbose();
let db = new sqlite3.Database('./user1.db', sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE
, (err) => {
if (err) {
console.error(err.message);
} else {
console.log('Connected to the chinook database.');
}
});
db.close((err) => {
if (err) {
return console.error(err.message);
}
console.log('Close the database connection.');
});

NodeJS return SQL queries to pass to another file

I have been struggling with the returning of data for the past 2 days. I really need help in getting the data to show in another .js file but I can't seem to do so.
From all the research I have done, I know that I need the callback function in order to do the return. When I output the data in file1.js, it shows, which is correct.
However, I need to access the returned data in my file2.js but it is not showing.
Am I missing anything out? Please help, any response is greatly appreciated. Thanks.
Note that my return statement in file1.js is near the end of the code.
Also, my "res" array is ALWAYS empty when accessed outside the function. Why is this so?
file1.js
var sql = require("mssql");
// Create a configuration object for our Azure SQL connection parameters
var dbConfig = {
server: "***", // Use your SQL server name
database: "***", // Database to connect to
user: "***", // Use your username
password: "***", // Use your password
port: 1433,
// Since we're on Windows Azure, we need to set the following options
options: {
encrypt: true
}
};
var obj = {};
var res = [];
// This function connects to a SQL server, executes a SELECT statement,
// and displays the results in the console.
function getProducts(callback) {
// Create connection instance
var conn = new sql.ConnectionPool(dbConfig);
conn.connect()
// Successfull connection
.then(function () {
// Create request instance, passing in connection instance
var req = new sql.Request(conn);
// Call mssql's query method passing in params
req.query("SELECT sp.product_name, count(ss.product_id) as 'quantity' " +
"FROM smartcoolerstocks ss JOIN smartcoolerproducts sp " +
"ON sp.product_id = ss.product_id " +
"GROUP by sp.product_name ")
.then(function (recordset) {
//console.log(recordset.recordset);
conn.close();
//NEED CALLBACK FUNCTION
console.log(recordset.recordset.length);
for(var i = 0; i<recordset.recordset.length; i++ ){
res.push(recordset.recordset[i]);
}
callback(null,recordset.recordset);
process.exit(1);
})
// Handle sql statement execution errors
.catch(function (err) {
console.log(err);
conn.close();
})
})
// Handle connection errors
.catch(function (err) {
console.log(err);
conn.close();
});
}
//call Fn for db query with callback
getProducts(function(err,data){
if (err) {
// error handling code goes here
console.log("ERROR : ",err);
} else {
// code to execute on data retrieval
//console.log("result from db is : ",data.recordset);
//return data.recordset;
return res;
}
});
console.log(res); //WHY IS THIS EMPTY HERE?
module.exports = {
getProducts(){},
};
Blockquote
file2.js
var myDB2 = require('./sqltest2');
console.log(myDB2.getProducts());
Here's my output in cmd:
After the '7', there's nothing showing.
My IDEAL output should be the following if I manage to get the returned data in file2.js from file1.js:
You cannot see the res because its race condition, your console.log(res) executed earlier than recordset callback.
file1.js already executed the getProducts function so there is not data that returned to file2.js.
var sql = require("mssql");
// Create a configuration object for our Azure SQL connection parameters
var dbConfig = {
server: "***", // Use your SQL server name
database: "***", // Database to connect to
user: "***", // Use your username
password: "***", // Use your password
port: 1433,
// Since we're on Windows Azure, we need to set the following options
options: {
encrypt: true
}
};
var obj = {};
var res = [];
// This function connects to a SQL server, executes a SELECT statement,
// and displays the results in the console.
function getProducts(callback) {
// Create connection instance
var conn = new sql.ConnectionPool(dbConfig);
conn.connect()
// Successfull connection
.then(function() {
// Create request instance, passing in connection instance
var req = new sql.Request(conn);
// Call mssql's query method passing in params
req.query("SELECT sp.product_name, count(ss.product_id) as 'quantity' " +
"FROM smartcoolerstocks ss JOIN smartcoolerproducts sp " +
"ON sp.product_id = ss.product_id " +
"GROUP by sp.product_name ")
.then(function(recordset) {
conn.close();
callback(null, recordset.recordset);
process.exit(1);
})
// Handle sql statement execution errors
.catch(function(err) {
console.log(err);
conn.close();
callback(err, null);
})
}).catch(function(err) {
console.log(err);
conn.close();
callback(err, null);
});
}
module.exports = getProducts;
And file2.js
var myDB2 = require('./sqltest2');
myDB2(function(err, data){
if( err ) console.log(err);
else console.log(data);
});

Using Promise for parallel insert query on MySQL fails

I wrote a code for running insert queries in parallel in Node.js and I am also using Promise.js.
But the code fails and raises an exception of "Duplicate Primary Key" entry.
The code is as follows,
var Promise = require("promise");
var mySql = require("mysql");
var _ = require("underscore");
var connection = mySql.createConnection({
host : "localhost",
user : "root",
password : "rahul",
database : "testDb" //schema
});
connection.connect();
function insertDept(name){
return new Promise(fn);
function fn(resolve,reject){
getMaxDept().then(function(rows){
var deptId = rows[0]["DeptId"];
deptId = (_.isNull(deptId) === true) ? 125 : deptId;
var sql = "insert into departmentTbl values("+deptId+",'"+name+"')";
console.log(sql);
connection.query(sql,function(err,rows,fields){
if(err){
console.log(err);
return reject(err);
}else{
return resolve(rows);
}
});
}).catch(function(error){
return reject(error);
});
}//fn
}//insertDept
function getMaxDept(){
return new Promise(fn);
function fn(resolve,reject){
var sql = "select max(deptId) + 1 as 'DeptId' from departmentTbl";
connection.query(sql,function(err,rows,fields){
if(err){
console.log(err.stack);
return reject(err);
}else{
return resolve(rows);
}
});
}// fn
} //getMaxDept
function createDeptForAll(){
var promiseObj = [];
if(arguments.length > 0){
_.each(arguments,callback);
}else{
throw "No departments passed";
}
function callback(deptName){
promiseObj.push(insertDept(deptName))
}
return Promise.all(promiseObj);
}//createDeptForAll
createDeptForAll("Archiology","Anthropology").then(function(createDepartment){
createDepartment.then(function(rows){
console.log("Rows inserted "+rows["affectedRows"]);
}).catch(function(error){
console.log(error);
}).done(function(){
connection.end();
});
});
When I run the above code code,
the output is
rahul#rahul:~/myPractise/NodeWebApp/NodeMySqlv1.0$ node queryUsingPromise02.js
insert into departmentTbl values(125,'Archiology')
insert into departmentTbl values(125,'Anthropology')
{ [Error: ER_DUP_ENTRY: Duplicate entry '125' for key 'PRIMARY'] code: 'ER_DUP_ENTRY', errno: 1062, sqlState: '23000', index: 0 }
As Department Id is Primary key and the promises run in parallel,
the primary key for second Department's insert query fails.
As you can see, before any insert query, I fetch the max of departments + 1.
if the above query fails, I assign '125'.
Now, what should I change so that my above written code runs.
Should I use trigger of "before insert" for calculating next value of primary key of "department ID" at the database level itself or should I do something in my own Node.js code?
This issue is not restricted to node or JavaScript, but you will face this problem with any technology that tries to write to an SQL database in parallel. Unique id generation in a scenario like this is not trivial.
If you have the option to do so, make your id field in your database AUTO_INCREMENT, this will save you a lot of headaches in situations like this.
More about AUTO_INCREMENT.
Advice on AUTO_INCREMENT looks good.
You might also consider writing a promisifier for connection.query(), allowing for the remaining code to be tidied up.
So with getMaxDept() purged and a connection.queryAsync() utility in place, you might end up with something like this :
var Promise = require("promise");
var mySql = require("mysql");
var connection = mySql.createConnection({
host: "localhost",
user: "root",
password: "rahul",
database: "testDb" //schema
});
connection.connect();
// promisifier for connection.query()
connection.queryAsync = function(sql) {
return new Promise((resolve, reject) => {
connection.query(sql, (err, rows, fields) => {
if(err) { reject(err); }
else { resolve({'rows':rows, 'fields':fields}); }
});
});
};
function insertDept(name) {
var sql = "insert into departmentTbl values(" + name + "')"; // assumed - needs checking
return connection.queryAsync(sql);
}
function createDeptForAll(departments) {
if(departments.length > 0) {
return Promise.all(departments.map(insertDept));
} else {
return Promise.reject(new Error('No departments passed'));
}
}
createDeptForAll(['Archiology', 'Anthropology']).then((results) => {
results.forEach((result) => {
console.log("Rows inserted " + result.rows.affectedRows);
connection.end();
});
}).catch((error) => {
console.log(error);
connection.end();
});

Categories

Resources