I am trying to learn how to write my own modules for nodejs, specifically to implement various objects, with which I could then work throughout the app.
I want to use the result something like this:
// assuming I have a database table:
Person_table(ID int A_I, NAME varchar, AGE int)
//code:
var p = new Person("John", 22);
p.writeToDatabase();
//the object is now written in database
I have tried the following, but for some reason beyond my understanding it doesn't work.
I have declared three files: db.js, person.js, app.js
db.js
var mysql = require('mysql');
var conn = mysql.createPool({
host : 'localhost',
database: 'db_name',
user: 'user_name',
password : 'pass',
multipleStatements: true,
connectionLimit : 10
});
conn.getConnection(function(err){
if(err) throw err;
});
module.exports = conn;
person.js
var db = require('./db.js');
function Person(n, a) {
this.name = n;
this.age = a;
}
Person.prototype.writeToDatabase = function (callback) {
db.query("INSERT INTO Person_table(NAME, AGE) VALUES(?,?)", [this.name, this.age], function (err, rows) {
if (err) return callback(err);
else return callback(null, rows);
});
}
module.exports = Person;
app.js
var Person = require('./person.js')
var p = new Person("John", 22);
p.writeToDatabase(function(err, rows){
if(err) console.log(err);
else console.log("written to DB");
});
I would appreciate help on what is wrong with the code.
As a bonus, I would like to ask for any good literature on the subject of module exporting and object prototypes for creating a layered Node.js app.
The first piece of advice I'll give is the following: you can pass your callback straight through like this in your person.js:
Person.prototype.writeToDatabase = function (callback) {
db.query("INSERT INTO Person_table(NAME, AGE) VALUES(?,?)",
[this.name, this.age],
callback // Since you do nothing with `err` or `rows`
);
}
Why? Because you handle the callback in your app.js file :).
Next, I think it doesn't work because you don't wait for your function conn.getConnection to finish, and moreover, you don't use the return value of this function! Check the documentation here.
You should reorganize your code like:
db.js
var mysql = require('mysql');
module.exports = mysql.createPool({
host : 'localhost',
database: 'db_name',
user: 'user_name',
password : 'pass',
multipleStatements: true,
connectionLimit : 10
});
person.js
module.exports = function (connection) { // You need the connection to query your database.
function Person(n, a) {
this.name = n;
this.age = a;
}
Person.prototype.writeToDatabase = function (callback) {
// The connection is used here.
connection.query("INSERT INTO Person_table(NAME, AGE) VALUES(?,?)",
[this.name, this.age],
callback
);
}
return Person;
};
app.js
var db = require('./db');
db.getConnection(function (err, connection) { // Once you're connected to the database, you can start using your app.
if (err) throw err;
var Person = require('./person')(connection); // Since we export a function with a parameter, we need to require the person.js like this.
var p = new Person("John", 22);
p.writeToDatabase(function(err, rows) {
if(err) console.log(err);
else console.log("written to DB");
});
});
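One thing worth adding to this pattern: a connection checked out with getConnection should be handed back to the pool when you are done with it, otherwise the pool eventually runs out of free connections. Inside the getConnection callback above, that could look like:
p.writeToDatabase(function (err, rows) {
  if (err) console.log(err);
  else console.log("written to DB");
  connection.release(); // hand the connection back to the pool once the query has finished
});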
If I'm not clear, you can ask for more details :). If the pattern I show you doesn't fit with your expectation you can do the job in other ways ;)
I figured out the problem. My example works perfectly; I just had one totally unrelated problem.
At the end of my short program I had process.exit() so that my app would end when it finished its task. However, I hadn't taken into account the asynchronous nature of Node.js, so my process.exit() executed before my write to the database was completed, and therefore it failed.
Thanks for the help anyway!
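For anyone hitting the same thing, a minimal sketch of the fix (based on the app.js above) is to call process.exit() only inside the callback, once the insert has actually finished:
var Person = require('./person.js');

var p = new Person("John", 22);
p.writeToDatabase(function (err, rows) {
  if (err) console.log(err);
  else console.log("written to DB");
  process.exit(); // exit only after the asynchronous insert has completed
});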
Related
I am trying to export database properties stored in a properties file from a JavaScript module. By the time I read the database properties file, the JavaScript file has already been exported and the data properties appear undefined wherever I use them in other modules.
const Pool = require('pg').Pool;
const fs = require('fs')
const path = require('path');
class DbConfig {
constructor(dbData) {
this.pool = new Pool({
user: dbData['user'],
host: dbData['host'],
database: dbData['database'],
password: dbData['password'],
max: 20,
port: 5432
});
}
}
function getdbconf() {
const dbData = {};
fs.readFile("../../db_properties.txt"), 'utf8', (err, data) => {
if (err) {
console.error(err)
return
}
// dbData = {"user":"postgres", "password": "1234"...};
return dbData;
});
}
let db = new DbConfig(getdbconf());
let dbPool = db.pool;
console.log("dbpool : -> : ",dbPool); // username and password appear undefined
module.exports = { dbPool };
Is there a way to read the data before exporting it from the JavaScript module?
Usually database config or any other sensitive info is read from a .env file using dotenv.
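For illustration, a minimal sketch of the dotenv approach (the variable names and file contents below are made up for the example):
// .env (not committed to version control)
// DB_HOST=localhost
// DB_USER=postgres
// DB_PASSWORD=1234
// DB_NAME=mydb

require('dotenv').config(); // loads the .env file into process.env

const { Pool } = require('pg');

const pool = new Pool({
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  max: 20,
  port: 5432,
});

module.exports = { dbPool: pool };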
Or
you could also provide the env from the command line itself, like
DB_HOST=127.0.0.1 node index.js
inside your index.js
console.log(process.env.DB_HOST)
Please create a new file (connection-pool.js) and paste this code:
const { Pool } = require('pg');
const poolConnection = new Pool({
user: 'postgresUserName',
host: 'yourHost',
database: 'someNameDataBase',
password: 'postgresUserPassword',
port: 5432,
});
console.log('connectionOptions', poolConnection.options);
module.exports = poolConnection;
To use it, create a new file (demo-connection.js) and paste this code:
const pool = require('./connection-pool');
pool.query('SELECT NOW();', (err, res) => {
if (err) {
// throw err;
console.log('connection error');
return;
}
if (res) {
console.log(res.rows);
pool.end();
}
});
This is an alternative option 🙂
Exporting the result of async calls
To export values which have been obtained asynchronously, export a Promise.
const fs = require('fs/promises'); // the `/promises` variant means no callbacks, a Promise is returned
const dbDataPromise = fs.readFile('fileToRead'); // `readFile` returns a Promise now
module.exports = dbDataPromise;
Importing
When you need to use the value,
const dbDataPromise = require('./dbdata');
async function init() {
  const dbData = await dbDataPromise;
  // the rest of your code that depends on dbData goes here
}

// or without async, using Promise callbacks
function init() {
  dbDataPromise
    .then(dbData => { /* the rest of your code that depends on dbData goes here */ });
}
Current code broken
Please note that your current code, as pasted above, is broken:
function getdbconf() {
const dbData = {};
fs.readFile("../../db_properties.txt"), 'utf8', (err, data) => {
//[...] snipped for brevity
return dbData;
});
}
fs.readFile "returns" dbData, but there is nothing to return to, since you are in a callback which you did not call yourself. Function getdbconf returns nothing.
The line that says let db = new DbConfig(getdbconf()); will NOT work. It needs to be inside the callback.
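For illustration, the callback-only version would look roughly like this (assuming the file contains JSON, which is a guess based on the commented-out dbData example in the question):
fs.readFile('../../db_properties.txt', 'utf8', (err, data) => {
  if (err) {
    console.error(err);
    return;
  }
  const dbData = JSON.parse(data); // assumption: the file holds JSON like {"user": "postgres", "password": "1234", ...}
  const db = new DbConfig(dbData); // everything that needs dbData has to live inside this callback
  console.log('dbpool : -> : ', db.pool);
});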
The only way to avoid putting all of your code inside the callback (and "flatten" it) is to use await, or to use readFileSync.
Avoiding the issue
Using environment variables
Suhas Nama's suggestion is a good one, and is common practice. Try putting the values you need in environment variables.
Using synchronous readFile
While using synchronous calls does block the event loop, it's OK to do so during initialization, before your app is up and running.
This avoids the problem of having everything in a callback or having to export Promises, and is often the best solution.
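Applied to the code from the question, a minimal sketch of the synchronous variant (again assuming db_properties.txt holds JSON):
const fs = require('fs');
const { Pool } = require('pg');

// Blocks the event loop only once, at startup, before the app begins serving requests.
const dbData = JSON.parse(fs.readFileSync('../../db_properties.txt', 'utf8'));

const dbPool = new Pool({
  user: dbData.user,
  host: dbData.host,
  database: dbData.database,
  password: dbData.password,
  max: 20,
  port: 5432,
});

module.exports = { dbPool };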
I'm trying to retrieve some version data from my MySQL (MariaDB) server.
For better maintenance I created one connection object to handle all database queries.
However, when I query some data, it seems like the query isn't executed right away, but only later, when the first await command appears.
dbControl.js :
var mysql = require('mysql');
function getConnection(){
let dbConnection = mysql.createConnection({
host: "localhost",
user: "root",
password: ""
});
dbConnection.connect(function (err) {
if (err) throw err;
});
this.get_version = function() {
let sql = 'SELECT * FROM versionControl ORDER BY id DESC LIMIT 1;'
dbConnection.query(sql, function (err, result) {
if (err) throw err;
console.log("vData:", result);
return result;
});
}
}
module.exports.getConnection = getConnection;
dataHandler.js:
const browserControl = require('./browserControl');
const dbControl = require('../db/dbControl');
const dbConnection = new dbControl.getConnection();
let versionData;
// Here it should be executed -->
versionData = dbConnection.get_version();
console.log(versionData);
async function get_something(){
// Here it is executed -->
const browser = await browserControl.startBrowser();
//......
}
There is a third file which simply controls the program. At the moment it just executes the function get_something() like this:
const originData = require('./dataHandler.js');
let data = originData.get_something();
console.log(data);
P.S.: it's all running with Node, thanks in advance ;_)
Your get_something() is marked as async.
Call it with await get_something() or get_something().then(console.log).
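Applied to the third file, that looks roughly like this (a sketch, assuming dataHandler.js exports get_something):
const originData = require('./dataHandler.js');

originData.get_something().then(data => {
  console.log(data); // the resolved value, not a pending Promise
});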
OK, I got a solution. The SQL query function now returns a promise, and I created an extra async getVersion function which waits for that promise to resolve. Thus the promise waits for the DB answer and the rest waits until the promise is resolved.
The dataHandler.js now looks like this:
const browserControl = require('./browserControl');
const dbControl = require('../db/dbControl');
const dbConnection = new dbControl.getConnection();
async function getVersion() {
let versionData;
versionData = await dbConnection.get_version();
console.log(versionData);
}
getVersion();
async function get_something(){
const browser = await browserControl.startBrowser();
}
and the query-function now looks like this:
this.get_version = function() {
  let sql = 'SELECT * FROM versionControl.lol_scraper ORDER BY id DESC LIMIT 1;'
  return new Promise((resolve, reject) => {
    dbConnection.query(sql, function (err, result) {
      if (err) return reject(err); // reject instead of throwing, so the caller's await sees the error
      console.log("vData:", result);
      resolve(result);
    })
  });
}
P.S.: still open for smarter or more modern solutions ;_)
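One more modern option, purely as a sketch: the mysql2 package (not the mysql package used above) ships a promise-based API, which avoids the manual Promise wrapper entirely:
const mysql = require('mysql2/promise');

async function getVersion() {
  const connection = await mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: ''
  });
  const [rows] = await connection.query(
    'SELECT * FROM versionControl.lol_scraper ORDER BY id DESC LIMIT 1;'
  );
  console.log('vData:', rows);
  await connection.end();
  return rows;
}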
In my JS Lambda function I have something along the lines of the following...
import utils from './utils'

exports.handler = function () { // the Lambda handler in index.js
  return utils.initDB()
    .then(function () {
      return utils.doSomething();
    });
};
utils.js:
var dbConfig = null;
var knex = null;
function initDB() {
dbConfig = require('../db');
knex = require('knex')(dbConfig);
return;
}
Basically, how should I pass around the knex object? Is it okay to have it as a global var in the utils file? Should I return it to the handler and pass it into every utils.doX call? I'm thinking this might be causing problems with db connection/pooling, but I don't know how to find out.
For anyone who stumbles on this in the future (i.e. me when I'm googling how to do this again in a year):
http://blog.rowanudell.com/database-connections-in-lambda/ explains connection reuse in Lambda. Should look something like this:
const pg = require('pg');
const client = new pg.Client('postgres://myrds:5432/dbname');
client.connect();
exports.handler = (event, context, cb) => {
client.query('SELECT * FROM users WHERE ', (err, users) => {
// Do stuff with users
cb(null); // Finish the function cleanly
});
};
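Since the question was specifically about the knex object, the same reuse pattern with knex would look roughly like this (a sketch; the config shape expected from ../db is an assumption):
const dbConfig = require('../db');
const knex = require('knex')(dbConfig); // created once, outside the handler, so the pool is reused across warm invocations

exports.handler = (event, context, cb) => {
  knex('users').select('*')
    .then(users => {
      // Do stuff with users
      cb(null); // Finish the function cleanly
    })
    .catch(cb);
};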
I'm using a MEAN stack and writing these methods in Mongoose. I'm wondering what's wrong with what I put in the Mongoose model file. I would like to use Mongoose to simply print out a list of all the documents in the myModel collection.
myModel.methods.myMethod = function(cb){
this.model("Bids").find({}, 'myField', function(err, results){
if (err){console.log(err);return err;}
console.log("okay");
console.log(results);
})
this.save(cb);
}
Also, what is the code that I can write in Mongoose to tell if the myModel collection is empty or not?
It's better to teach a man how to fish than to give him a fish ...
So it would be extremely helpful if you could suggest what debugging tools I can install, such as an Express middleware, that could help me debug this myself. Please post your debugging suggestions here.
I'm assuming every other setup required for mongoose is correct.
At the line below, I think 'myField' is not needed.
this.model("Bids").find({}, 'myField', function(err, results)
Here is something more from scratch; maybe it will help you trace back your steps:
var mongoose = require('mongoose');
var assert = require('assert'); // required for the assert.equal calls below
//connection to Mongodb instance running on=======
//local machine or anywhere=========================
var uri = 'mongodb://localhost:27017/test';
var connection = mongoose.createConnection(uri);
//Define Schema==================================
var Schema = mongoose.Schema;
var BlogPostSchema = new Schema({
author: { type: Schema.Types.ObjectId },
title: String,
body: String
});
//Create model===================================================
var BlogPostModel = connection.model('BlogPost', BlogPostSchema);
//function to insert doc into model NOTE "pass in your =======
//callback or do away with it if you don't need one"=========
var insertBlogPost = function (doc, callback) {
//here is where our doc is converted to a mongoose object
var newblogPost = new BlogPostModel(doc);
//save to db
newblogPost.save(function (err) {
assert.equal(null, err);
//invoke your call back if any
callback();
console.log("saved successfully");
});
};
//function to get all BlogPosts====================================
var getAllBlogPosts = function (callback) {
//mongoose gets all docs. I think this answers your question directly
BlogPostModel.find(function (err, results) {
assert.equal(null, err);
//invoke callback with your mongoose returned result
callback(results);
});
};
//you can add as many functions as you need.
//Put all of your methods in a single object interface
//and expose this object using module.
var BlogPostManager = {
insertBlogPost: insertBlogPost,
getAllBlogPosts : getAllBlogPosts
}
module.exports = BlogPostManager;
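For completeness, a short sketch of how another file might consume this manager (the file name blogPostManager.js is just an example):
var BlogPostManager = require('./blogPostManager');

BlogPostManager.insertBlogPost({ title: 'Hello', body: 'First post' }, function () {
  BlogPostManager.getAllBlogPosts(function (results) {
    console.log(results); // every BlogPost document currently in the collection
  });
});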
I am new to the mocking concept and to JavaScript programming as well. I want to mock pg (the postgres module) in my JavaScript program. I can imitate a very simple scenario, but not the actual one.
Here is my userHandler.js:
var pg = require('pg');
var connectionString = process.env.DATABASE_URL || 'postgres://admin:admin@localhost:5432/mydb';
exports.handlePost = function(req,res){
var results = [];
// Grab data from http request
var adata = [req.body.Username, ..., req.body.FaxNum]; //Ignore for short.
// Get a Postgres client from the connection pool
pg.connect(connectionString, function(err, client, done) {
// SQL Query > Insert Data
var func_ = 'SELECT Dugong.Users_Add($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19)';
var addUser_ = client.query(func_, adata);
addUser_.on('error', function(error){
var data = {success : false,
username : req.body.Username,
reason : {errmsg : error.detail,
errid : 'addUser_' }};
return res.json(data);
});
addUser_.on('end',function(result){
var data = {success : true, username : req.body.Username};
console.log('Insert record completed');
return res.json(data);
});
// Handle Errors
if(err) {
console.log(err);
return ;
}
return;
});
};
And here is my unit test file. m_users_page.js:
var express = require('express');
var router = express.Router();
var test = require('unit.js');
var mock = require('mock');
var httpMocks = require('node-mocks-http');
var real_users_page = require('../routes/users_page.js');
var b = mock("../routes/userHandler.js", {
pg: {
connect: function (connectionString,callback) {
if(connectionString === 'postgres://admin:admin@localhost:5432/skorplusdb'){
console.log('333');
//pseudo object
var client = {query : function(func_, adata, cb){
cb(null,adata);
}};
client.on('error', 'test emit the error in my mock unit.');
//pseudo done object
var done = function(){};
callback(null, client, done);
return ;
}
}
}
}, require);
describe('Test with static login', function(){
it('Test simple login', function(done){
var request = httpMocks.createRequest({
method: 'POST',
url: '/users',
body: { Username:"Je", ..., FaxAreaCode:'232'} //Ignore for short
});
var response = httpMocks.createResponse();
b.handlePost(request,response, function(){
var data = response._getData();
console.log("7777777777" + data);
done();
});
});
});
Here is the error:
$ mocha testing/m_users_page.js
Test with static login
333
1) Test simple login
0 passing (7ms)
1 failing
1) Test with static login Test simple login:
TypeError: Object #<Object> has no method 'on'
at Object.mock.pg.connect (testing/m_users_page.js:22:14)
at Object.exports.handlePost (routes/userHandler.js:30:6)
at Context.<anonymous> (testing/m_users_page.js:63:5)
My questions are:
What is a proper way to do a unit test in Node + Express + Mock + node-mocks-http?
How do I find a good, well-documented framework to read up on? After several days, I started circling around the same search-engine results. They are too simple, and I can't adapt them to my problem.
First, make sure you understand the difference between unit tests and integration tests. If you want to test against the actual db, even if it has a dummy data set, that's an integration test and it doesn't need a mock: just connect to the database with the dummy data.
But suppose you want to test your webserver module, and you want to mock the db. First, pass the database module as a parameter rather than requiring pg directly. Also, wrap the postgres interface with your own class:
const { Pool } = require('pg');
module.exports = class DatabaseInterop {
// Connection parameters can be passed to the constructor or the connect method, parameters to
// DatabaseInterop::connect will override the initial constructor parameters.
constructor ({
user,
password,
database,
host,
logger={log: console.log, err: console.error},
}) {
this.logger = logger;
this._params = {
user,
password,
database,
host,
};
}
connect (params) {
const {
user,
password,
database,
host,
} = Object.assign({}, this._params, params);
this._pool = new Pool({
user,
password,
database,
host,
});
['SIGHUP', 'SIGINT', 'SIGQUIT', 'SIGILL', 'SIGTRAP', 'SIGABRT',
'SIGBUS', 'SIGFPE', 'SIGUSR1', 'SIGSEGV', 'SIGUSR2', 'SIGTERM'
].forEach((sig) => { // arrow function so that `this` still refers to the instance
process.on(sig, async () => {
this.logger.log(`Exiting for ${sig}...`);
process.exit(0);
});
});
return this;
}
async stop () {
return this._pool.end();
}
runQuery (queryString, params=[]) {
return params.length ? this._pool.query(queryString, params) : this._pool.query(queryString);
}
};
Now to mock it out, you can simply extend your custom class in your test file:
const DatabaseInterop = require('/path/to/database_interop.js');
class MockDB extends DatabaseInterop {
connect () {
// no-op
}
runQuery (qs, ...params) {
// return whatever
}
stop () {
// noop
}
}
Now your tests can inject the mock, and your actual system can inject the real interface.
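To make the injection concrete, here is a sketch of the wiring (this assumes userHandler.js is refactored into a factory that takes the database object; that factory shape is an assumption, not the code from the question):
// test file (sketch)
const MockDB = require('/path/to/mock_db.js');              // the subclass shown above, in its own file (hypothetical path)
const makeHandlePost = require('../routes/userHandler.js'); // assumed shape: module.exports = (db) => (req, res) => { ... }

const mockDb = new MockDB({});
mockDb.runQuery = async () => ({ rows: [], rowCount: 0 });  // canned, pg-shaped result for this test
const handlePost = makeHandlePost(mockDb);

// production wiring (sketch)
// const DatabaseInterop = require('/path/to/database_interop.js');
// const db = new DatabaseInterop({ user, password, database, host }).connect();
// const realHandlePost = makeHandlePost(db);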