I am trying to connect my application to the database using a connection pool. It connects fine and data insertion works without any issues, but the other queries in the same file are slowing down.
I have also tried the release() method, but it isn't working properly.
How can I release the pooled client to the next query once the current query has finished?
Below is my dbpool.js file, where I set up a common, generalized database connection:
var pg = require('pg');

var PGUSER = 'postgres';
var PGDATABASE = 'test_database';

var config = {
    user: PGUSER, // name of the user account
    host: 'localhost',
    database: PGDATABASE, // name of the database
    password: 'password#AWS',
    port: 5432,
    max: 10,
    idleTimeoutMillis: 10000
};

const pool = new pg.Pool(config);

const DB = {
    query: function(query, callback) {
        pool.connect((err, client, done) => {
            if (err) { return callback(err); }
            client.query(query, (err, results) => {
                // done();
                client.release();
                // if (err) { console.error("ERROR: ", err) }
                if (err) { return callback(err); }
                callback(null, results.rows);
            })
        });
    }
};

module.exports = DB;
I tried with both the done() and client.release() methods, but no luck. If I use both, I get an error saying the client is already released.
Below is my socket.js file code:
var express = require('express');
const connection = require('./dbpool.js');

if (arData == '0022') {
    const queryText = "INSERT INTO alert(alert_data) VALUES('" + arData + "')";
    connection.query(queryText, (err, res) => {
        if (err) {
            console.log(err.stack);
        }
    });
}

if (arData == '0011') {
    const queryText = "INSERT INTO table2(alert_data) VALUES('" + arData + "')";
    connection.query(queryText, (err, res) => {
        if (err) {
            console.log(err.stack);
        }
    });
}

function ReverseCommunication() {
    const select1 = "SELECT * FROM alert WHERE action = '0' ORDER BY alert_id ASC LIMIT 1";
    connection.query(select1, (err, res) => {
        if (err) {
            console.log("Error1");
            res.json({"error": true});
        }
        else {
            console.log("res==", res);
        }
    });
}

setInterval(function() {
    ReverseCommunication();
}, 2000)
With a pool you shouldn't need to close the connection yourself. The pool reuses connections for subsequent requests, so you don't have to connect to the DB each time.
(I'm not a PG expert here; I'm sure others could expand on this far better than I can.)
What works for us is to set up the dbpool file you have like this:
const { Pool, Client } = require('pg');

const pool = new Pool({
    user: process.env.POSTGRES_USER,
    host: process.env.POSTGRES_URL,
    database: process.env.POSTGRES_DATABASE,
    password: process.env.POSTGRES_PASSWORD,
    port: process.env.POSTGRES_PORT,
    keepAlive: true,
    connectionTimeoutMillis: 10000, // 10 seconds
    max: 10
});

pool.connect()
    .then(() => console.log('pg connected'))
    .catch(err => console.error(err))

module.exports = pool
Then use pool.query in the places where you currently use pool.connect.
Also, just a side note: what library are you using for PG? I noticed your queries are built dynamically from input data, so you may want to switch to parameterized queries to prevent possible SQL injection.
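For example, the insert in socket.js could go straight through pg's pool.query with a parameter placeholder instead of string concatenation; a minimal sketch, assuming dbpool.js exports the pool itself as shown above:
// hedged sketch: pool.query checks out a client, runs the query, and releases the client for you
const pool = require('./dbpool.js');

const queryText = 'INSERT INTO alert(alert_data) VALUES($1)';
pool.query(queryText, [arData], (err, res) => {
    if (err) {
        console.log(err.stack);
    }
});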
I have to read a table named dbo.Index from SQL Server in my Node.js Express app, but the query fails and returns nothing.
Here's my code:
app.get('/getData', (req, res, next) => {
    const config = {
        user: 'sa',
        password: '12345',
        server: 'localhost',
        database: 'myDB'
    }
    const pool = new sql.ConnectionPool(config)
    pool.connect(err => {
        if (err) console.log(err)
        console.log('connected.')
        const request = new sql.Request(pool)
        request.query(`SELECT * FROM dbo.Index`, (error, recordSet) => {
            if (err) console.log(error)
            res.send(recordSet)
        })
    })
})
I've tested this code and it works well with other tables but with this specific name dbo.Index, it fails.
It's necessary for me to read it, and I can't change the table name (I have no permission).
I use the node-mssql package to connect to the database.
INDEX is a reserved word, so you will need to wrap it in square brackets, like so:
SELECT * FROM [dbo].[Index]
It's generally best to try and avoid using reserved words for table or column names as it easily leads to confusion, mistakes and bugs.
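Applied to the code in the question, the only change needed is the bracketed table name (an untested sketch; I've also made the callback check error rather than the outer err):
const request = new sql.Request(pool)
request.query(`SELECT * FROM [dbo].[Index]`, (error, recordSet) => {
    if (error) console.log(error)
    res.send(recordSet)
})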
I want to know whether a user with a given email id, say xyz#abc.com, already exists in my db.
function isDuplicateUser(emailId) {
    var sql = 'SELECT count(*) FROM UserDetails WHERE emailId ="' + emailId + '";';
    var con = mysql.createConnection({
        host: "localhost",
        user: "root",
        password: "32577488",
        database: "mydb"
    });
    con.connect(function(err) {
        con.query(sql, function (err, result) {
            if (err) throw err;
            console.log("Inside Duplicate check");
            console.log(result[0]);
            console.log(result.count);
            console.log(result[0].emailId);
            console.log(result[0].count);
            console.log("1 record inserted");
        });
    });
}
But all the console.log statements print undefined! I thought of using count so that if it is more than 0, a user with that email id already exists. Please help me! I also have another doubt: is it OK to open a connection in every function? For example, for signup I have:
function signup(email, password, name) {
    var sql = '...';
    // getting a connection once
}

function signin(emailid, password) {
    // getting a connection here
    return success;
}
It seems like the connection code gets replicated many times!
Please try with this SQL statement and let me know the result:
// replace your var sql with this one
const sql = `SELECT count(*) FROM UserDetails WHERE emailId = '${emailId}'`;
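As a side note, part of the reason the original logging printed undefined is most likely that SELECT count(*) comes back under a column literally named count(*); aliasing it, and using a placeholder instead of string concatenation, makes the result easy to read. A rough sketch with the mysql package (the total alias is just illustrative):
const sql = 'SELECT COUNT(*) AS total FROM UserDetails WHERE emailId = ?';
con.query(sql, [emailId], function (err, result) {
    if (err) throw err;
    console.log(result[0].total); // number of users with that email id
});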
On the other hand, regarding how to manage the connection in each function, my recommendation is to create a pool of connections in your app and get a connection from the pool in each function that needs one (you need to take care of releasing the connection when you are finished, so it becomes available again for new incoming requests):
// connection created once, when node starts
const options = {
    connectionLimit: 10,
    host: HOST,
    user: USER,
    password: PASSWORD,
    database: DATABASE,
    port: 3306,
    timezone: 'Z',
    // debug: true,
    multipleStatements: true,
    // ...azureCertificate && sslOptions,
};

const pool = mysql.createPool(options);

// in each function, get a connection from the pool using this helper
function getConnection() {
    return new Promise((resolve, reject) => {
        if (pool === null) {
            return reject(new Error(`MySQL connection is not established. You must connect first.`));
        }
        pool.getConnection((err, connection) => {
            if (err) {
                if (connection) {
                    connection.release();
                }
                return reject(err);
            }
            return resolve(connection);
        });
    });
}
Hope this helps.
This example was made using npm package mysql: https://www.npmjs.com/package/mysql
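Putting it together, isDuplicateUser could look roughly like this on top of getConnection() (a sketch under the assumptions above; the total alias is illustrative, and the connection must always be released back to the pool):
async function isDuplicateUser(emailId) {
    const connection = await getConnection();
    try {
        const rows = await new Promise((resolve, reject) => {
            connection.query(
                'SELECT COUNT(*) AS total FROM UserDetails WHERE emailId = ?',
                [emailId],
                (err, results) => (err ? reject(err) : resolve(results))
            );
        });
        return rows[0].total > 0;
    } finally {
        // release the connection so it becomes available to other requests
        connection.release();
    }
}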
I'm using the node-mongodb-native driver with MongoDB to write a website.
I have some questions about how to manage connections:
Is it enough to use only one MongoDB connection for all requests? Are there any performance issues? If not, can I set up a global connection to use in the whole application?
If not, is it good to open a new connection when a request arrives and close it once the request has been handled? Is it expensive to open and close a connection?
Should I use a global connection pool? I hear the driver has a native connection pool. Is it a good choice?
If I use a connection pool, how many connections should be used?
Are there other things I should notice?
The primary committer to node-mongodb-native says:
You open MongoClient.connect once when your app boots up and reuse
the db object. It's not a singleton connection pool; each .connect
creates a new connection pool.
So, to answer your question directly, reuse the db object that results from MongoClient.connect(). This gives you pooling, and will provide a noticeable speed increase as compared with opening/closing connections on each db action.
Open a new connection when the Node.js application starts, and reuse the existing db connection object:
/server.js
import express from 'express';
import Promise from 'bluebird';
import logger from 'winston';
import { MongoClient } from 'mongodb';
import config from './config';
import usersRestApi from './api/users';

const app = express();

app.use('/api/users', usersRestApi);

app.get('/', (req, res) => {
    res.send('Hello World');
});

// Create a MongoDB connection pool and start the application
// after the database connection is ready
MongoClient.connect(config.database.url, { promiseLibrary: Promise }, (err, db) => {
    if (err) {
        logger.warn(`Failed to connect to the database. ${err.stack}`);
    }
    app.locals.db = db;
    app.listen(config.port, () => {
        logger.info(`Node.js app is listening at http://localhost:${config.port}`);
    });
});
/api/users.js
import { Router } from 'express';
import { ObjectID } from 'mongodb';

const router = new Router();

router.get('/:id', async (req, res, next) => {
    try {
        const db = req.app.locals.db;
        const id = new ObjectID(req.params.id);
        const user = await db.collection('user').findOne({ _id: id }, {
            email: 1,
            firstName: 1,
            lastName: 1
        });
        if (user) {
            user.id = req.params.id;
            res.send(user);
        } else {
            res.sendStatus(404);
        }
    } catch (err) {
        next(err);
    }
});

export default router;
Source: How to Open Database Connections in a Node.js/Express App
Here is some code that will manage your MongoDB connections.
var MongoClient = require('mongodb').MongoClient;
var url = require("../config.json")["MongoDBURL"]

var option = {
    db: {
        numberOfRetries: 5
    },
    server: {
        auto_reconnect: true,
        poolSize: 40,
        socketOptions: {
            connectTimeoutMS: 500
        }
    },
    replSet: {},
    mongos: {}
};

function MongoPool() {}

var p_db;

function initPool(cb) {
    MongoClient.connect(url, option, function(err, db) {
        if (err) throw err;
        p_db = db;
        if (cb && typeof(cb) == 'function')
            cb(p_db);
    });
    return MongoPool;
}

MongoPool.initPool = initPool;

function getInstance(cb) {
    if (!p_db) {
        initPool(cb)
    } else {
        if (cb && typeof(cb) == 'function')
            cb(p_db);
    }
}

MongoPool.getInstance = getInstance;

module.exports = MongoPool;
When you start the server, call initPool
require("mongo-pool").initPool();
Then in any other module you can do the following:
var MongoPool = require("mongo-pool");
MongoPool.getInstance(function (db) {
    // Query your MongoDB database.
});
This is based on MongoDB documentation. Take a look at it.
Manage mongo connection pools in a single self-contained module. This approach provides two benefits. Firstly, it keeps your code modular and easier to test. Secondly, you're not forced to mix your database connection into your request object, which is NOT the place for a database connection object. (Given the nature of JavaScript, I would consider it highly dangerous to mix anything into an object constructed by library code.) With that in mind, consider a module that exports two methods: connect = () => Promise and get = () => dbConnectionObject.
With such a module you can firstly connect to the database
// runs in boot.js or whatever file your application starts with
const db = require('./myAwesomeDbModule');

db.connect()
    .then(() => console.log('database connected'))
    .then(() => bootMyApplication())
    .catch((e) => {
        console.error(e);
        // Always hard exit on a database connection error
        process.exit(1);
    });
Once in flight, your app can simply call get() whenever it needs a DB connection.
const db = require('./myAwesomeDbModule');
db.get().find(...)... // I have excluded code here to keep the example simple
If you set up your db module as follows, you not only ensure that your application will not boot unless it has a database connection, you also get a global way of accessing the connection pool that throws an error if you have not yet connected.
// myAwesomeDbModule.js
let connection = null;

module.exports.connect = () => new Promise((resolve, reject) => {
    MongoClient.connect(url, option, function(err, db) {
        if (err) { reject(err); return; }
        resolve(db);
        connection = db;
    });
});

module.exports.get = () => {
    if (!connection) {
        throw new Error('Call connect first!');
    }
    return connection;
}
If you have Express.js, you can use express-mongo-db for caching and sharing the MongoDB connection between requests without a pool (since the accepted answer says it is the right way to share the connection).
If not - you can look at its source code and use it in another framework.
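If I remember the package's README correctly, it is wired up roughly like this; treat the exact API as an assumption and double-check the docs:
// rough sketch from memory of express-mongo-db usage -- verify against the package README
const express = require('express');
const expressMongoDb = require('express-mongo-db');

const app = express();
app.use(expressMongoDb('mongodb://localhost:27017/mydb'));

app.get('/users', (req, res, next) => {
    // the middleware caches the connection and exposes it as req.db
    req.db.collection('users').find({}).toArray((err, users) => {
        if (err) return next(err);
        res.json(users);
    });
});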
You should create the connection as a service, then reuse it whenever you need it.
// db.service.js
import { MongoClient } from "mongodb";
import database from "../config/database";

const dbService = {
    db: undefined,
    connect: callback => {
        MongoClient.connect(database.uri, function(err, data) {
            if (err) {
                // no client to close if the connection attempt failed
                return callback(err);
            }
            dbService.db = data;
            console.log("Connected to database");
            callback(null);
        });
    }
};

export default dbService;
my App.js sample
// App start
dbService.connect(err => {
    if (err) {
        console.log("Error: ", err);
        process.exit(1);
    }
    server.listen(config.port, () => {
        console.log(`Api running at ${config.port}`);
    });
});
and use it wherever you want with
import dbService from "db.service.js"
const db = dbService.db
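For example, a route could then query through the shared connection; a minimal sketch, assuming the callback-style driver used above and an illustrative users collection:
// hedged sketch: dbService.db is only set after connect() has completed
app.get("/users", (req, res) => {
    dbService.db.collection("users").find({}).toArray((err, users) => {
        if (err) return res.status(500).json({ error: true });
        res.json(users);
    });
});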
I have been using generic-pool with Redis connections in my app and I highly recommend it. It's generic, and I definitely know it works with MySQL, so I don't think you'll have any problems using it with Mongo.
https://github.com/coopernurse/node-pool
I have implemented the code below in my project to add connection pooling, so it keeps a pool of connections and reuses the available ones.
/* Mongo.js */
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/yourdatabasename";
var assert = require('assert');

var connection = [];

// Create the database connection
establishConnection = function(callback) {
    MongoClient.connect(url, { poolSize: 10 }, function(err, db) {
        assert.equal(null, err);
        connection = db;
        if (typeof callback === 'function')
            callback(connection);
    });
}

function getconnection() {
    return connection;
}

module.exports = {
    establishConnection: establishConnection,
    getconnection: getconnection
}
/* app.js */
// establish one connection that all other routes will use
var db = require('./routes/mongo')

db.establishConnection();

// you can also call it with a callback if you want to create any collection at startup
/*
db.establishConnection(function(conn) {
    conn.createCollection("collectionName", function(err, res) {
        if (err) throw err;
        console.log("Collection created!");
    });
});
*/

// any other route.js
var db = require('./mongo')

router.get('/', function(req, res, next) {
    var connection = db.getconnection()
    res.send("Hello");
});
If you are using Express there is another, more straightforward method, which is to use Express's built-in feature for sharing data between routes and modules within your app: an object called app.locals. We can attach properties to it and access them from inside our routes. To use it, instantiate your mongo connection in your app.js file.
var app = express();

MongoClient.connect('mongodb://localhost:27017/')
    .then(client => {
        const db = client.db('your-db');
        const collection = db.collection('your-collection');
        app.locals.collection = collection;
    });

// view engine setup
app.set('views', path.join(__dirname, 'views'));
This database connection, or indeed any other data you wish to share around the modules of your app, can now be accessed within your routes via req.app.locals, as below, without the need for creating and requiring additional modules.
app.get('/', (req, res) => {
    const collection = req.app.locals.collection;
    collection.find({}).toArray()
        .then(response => res.status(200).json(response))
        .catch(error => console.error(error));
});
This method ensures that you have a database connection open for the duration of your app unless you choose to close it at any time. It's easily accessible with req.app.locals.your-collection and doesn't require creation of any additional modules.
The best approach to implementing connection pooling is to create one global array variable that holds the db name along with the connection object returned by MongoClient, and then reuse that connection whenever you need to contact the database.
In your Server.js define global.dbconnections = [];
Create a service named connectionService.js. It will have two methods: getConnection and createConnection.
When getConnection() is called, it looks for the details in the global connection variable and returns the connection details if they already exist; otherwise it calls createConnection() and returns the new connection details.
Call this service with a <db_name> and it will return the connection object if it already has one, otherwise it will create a new connection and return it to you.
Hope it helps :)
Here is the connectionService.js code:
var mongo = require('mongoskin');
var mongodb = require('mongodb');
var Q = require('q');

var service = {};
service.getConnection = getConnection;
module.exports = service;

function getConnection(appDB) {
    var deferred = Q.defer();
    var connectionDetails = global.dbconnections.find(item => item.appDB == appDB)

    if (connectionDetails) {
        deferred.resolve(connectionDetails.connection);
    } else {
        createConnection(appDB).then(function(connectionDetails) {
            deferred.resolve(connectionDetails);
        })
    }

    return deferred.promise;
}

function createConnection(appDB) {
    var deferred = Q.defer();

    mongodb.MongoClient.connect(connectionServer + appDB, (err, database) => {
        if (err) deferred.reject(err.name + ': ' + err.message);
        global.dbconnections.push({ appDB: appDB, connection: database });
        deferred.resolve(database);
    })

    return deferred.promise;
}
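A caller might then use the service roughly like this (a sketch; the customerDB name and users collection are illustrative):
var connectionService = require('./connectionService');

connectionService.getConnection('customerDB').then(function(db) {
    // the same db object is reused on subsequent calls for 'customerDB'
    db.collection('users').find({}).toArray(function(err, users) {
        // handle err / users here
    });
});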
In case anyone wants something that works in 2021 with Typescript, here's what I'm using:
import { MongoClient, Collection } from "mongodb";

const FILE_DB_HOST = process.env.FILE_DB_HOST as string;
const FILE_DB_DATABASE = process.env.FILE_DB_DATABASE as string;
const FILES_COLLECTION = process.env.FILES_COLLECTION as string;

if (!FILE_DB_HOST || !FILE_DB_DATABASE || !FILES_COLLECTION) {
    throw "Missing FILE_DB_HOST, FILE_DB_DATABASE, or FILES_COLLECTION environment variables.";
}

const client = new MongoClient(FILE_DB_HOST, {
    useNewUrlParser: true,
    useUnifiedTopology: true,
});

class Mongoose {
    static FilesCollection: Collection;

    static async init() {
        const connection = await client.connect();
        const FileDB = connection.db(FILE_DB_DATABASE);
        Mongoose.FilesCollection = FileDB.collection(FILES_COLLECTION);
    }
}

Mongoose.init();

export default Mongoose;
I believe that if a request comes in too soon (before Mongoose.init() has had time to finish), an error will be thrown, since Mongoose.FilesCollection will still be undefined.
import { Request, Response, NextFunction } from "express";
import Mongoose from "../../mongoose";

export default async function GetFile(req: Request, res: Response, next: NextFunction) {
    const files = Mongoose.FilesCollection;
    const file = await files.findOne({ fileName: "hello" });
    res.send(file);
}
For example, if you call files.findOne({ ... }) and Mongoose.FilesCollection is undefined, then you will get an error.
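One way to avoid that race is to wait for the init promise before the server starts accepting requests; a minimal sketch, assuming an Express app and port (both illustrative, not from the original code):
// hedged sketch: only start listening once the collection handle exists
Mongoose.init()
    .then(() => {
        app.listen(3000, () => console.log("Listening on port 3000"));
    })
    .catch((err) => {
        console.error("Failed to connect to MongoDB", err);
        process.exit(1);
    });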
Install the dependencies:
npm i express mongoose
Then create mongodb.js:
const express = require('express');
const mongoose = require('mongoose');

const app = express();

mongoose.set('strictQuery', true);

mongoose.connect('mongodb://localhost:27017/db_name', {
    useNewUrlParser: true,
    useUnifiedTopology: true
})
    .then(() => console.log('MongoDB Connected...'))
    .catch((err) => console.log(err))

app.listen(3000, () => { console.log("Started on port 3000 !!!") })
Then run it:
node mongodb.js
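Once connected you can define a model and query it from a route; a minimal sketch to add to mongodb.js before app.listen (the users collection and name field are just examples):
// illustrative schema/model -- adjust to your actual data
const User = mongoose.model('User', new mongoose.Schema({ name: String }));

app.get('/users', async (req, res) => {
    const users = await User.find();
    res.json(users);
});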
Using the method below you can easily manage as many connections as you need:
var mongoose = require('mongoose');

// Set up the default mongoose connection
const bankDB = () => {
    return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
}

const bankConnection = bankDB();
bankConnection.then(() => console.log('Connected to mongoDB-Atlas bankApp...'))
    .catch((err) => console.error('Could not connect to mongoDB', err));

// Set up a second mongoose connection
const myDB = () => {
    return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
}

const myConnection = myDB();
myConnection.then(() => console.log('Connected to mongoDB-Atlas connection 2...'))
    .catch((err) => console.error('Could not connect to mongoDB', err));

module.exports = { bankConnection, myConnection };
I ran into a mysql connection error which I seem to have fixed by guessing it was a JS variable scope issue.
In the route handler I've commented out the code that was throwing the following MySQL error: PROTOCOL_ENQUEUE_AFTER_FATAL_ERROR, and included the working code below.
require('dotenv').config();

const Hapi = require('hapi');
const mysql = require('mysql');
const Joi = require('joi');
const query = require('./lib/mysql/query');

const server = new Hapi.Server();

server.connection({
    host: process.env.SERVER_HOST,
    port: process.env.SERVER_PORT
});

const dbOne = mysql.createConnection({
    host: process.env.DB_HOST,
    user: process.env.DB_USER_ONE,
    password: process.env.DB_PASSWORD_ONE
});

const dbTwo = mysql.createConnection({
    host: process.env.DB_HOST_TEST,
    user: process.env.DB_USER_TWO,
    password: process.env.DB_PASSWORD_TWO,
});

server.route({
    method: 'GET',
    path: '/my-route',
    config: {
        validate: {
            query: {
                useDatabase2: Joi.valid('true'),
            },
        },
    },
    handler: (req, reply) => {
        const useDatabase2 = req.query.useDatabase2 === 'true';

        // This didn't work...
        /*
        const db = useDatabase2 ? dbTwo : dbOne;
        db.query(query, (err, rows) => {
            if (err) {
                reply({
                    statusCode: 500,
                    error: "Internal Server Error",
                    message: err,
                }).code(500);
            } else {
                // ...do something with the data
            }
        });
        */

        // This works...
        if (useDatabase2) {
            dbTwo.query(query, (err, rows) => {
                if (err) {
                    reply({
                        statusCode: 500,
                        error: "Internal Server Error",
                        message: err,
                    }).code(500);
                } else {
                    // ...do something with the data
                }
            });
        } else {
            dbOne.query(query, (err, rows) => {
                if (err) {
                    reply({
                        statusCode: 500,
                        error: "Internal Server Error",
                        message: err,
                    }).code(500);
                } else {
                    // ...do something with the data
                }
            });
        }
    }
});

server.start(err => {
    if (err) throw err;
});
I had assumed that const db was just a reference to the original variable (dbOne or dbTwo) within its own scope, and that it ceased to exist at the end of each request.
Is the db variable really conflicting with itself and causing the MySQL connection to fail?
You might want to look at this thread, which talks about variable reassignment for mysql connections:
https://github.com/mysqljs/mysql/issues/900
Also, as a precaution, try adding the 'use strict' directive to your js files. It will help avoid variable conflicts across your entire application.
Finally got to the bottom of this. The variable scope wasn't the issue.
Something was causing the MySQL connection to fail (perhaps the database server's network or load), and since I wasn't pooling the connections, once a connection broke it stayed broken.
I switched mysql.createConnection({... to mysql.createPool({... so that when one connection fails another replaces it, and so far so good.
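For reference, the change was essentially this (a trimmed sketch based on the code above):
// before: a single connection that stays broken once it fails
// const dbOne = mysql.createConnection({ host: process.env.DB_HOST, ... });

// after: a pool that hands out connections and replaces broken ones
const dbOne = mysql.createPool({
    connectionLimit: 10,
    host: process.env.DB_HOST,
    user: process.env.DB_USER_ONE,
    password: process.env.DB_PASSWORD_ONE
});

// pool.query acquires a connection, runs the query, and releases it automatically
dbOne.query(query, (err, rows) => {
    // ...handle err / rows as before
});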