I am writing some Express middleware that needs to access a database. It is going to be released as a package, so I want it to be as self-contained as possible. I was wondering how I should handle the connection to the database. It is async (of course), but it only needs to happen once, when the package is initialized. Where should this happen?
I was thinking something like this. The problem is, the middleware is passed back right away, before the database is ready.
// App
app.use(myMiddleware({
    db: "<db connection string>"
}));
// Middleware
module.exports = function(db) {
    // Open db
    return function(req, res, next) {
        // Middleware stuff
    };
};
I'd recommend against such a singleton; dependency injection is a better solution here (see the sketch below), and a single connection per app is hardly scalable. A connection pool might be a better idea.
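For the dependency-injection route, the app opens the connection (or pool) itself and hands it to the middleware, so the middleware never has to manage async startup. A minimal sketch, where createPool is a placeholder for whatever your driver provides:
// app startup (inside an async function)
const pool = await createPool("<db connection string>"); // hypothetical driver call
app.use(myMiddleware({ db: pool }));
// middleware package: receives a ready connection, stays fully synchronous
module.exports = function (options) {
    const db = options.db;
    return function (req, res, next) {
        // middleware stuff using db
        next();
    };
};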
That said, you can do something like:
var dbInstance = null; // to keep our instance
var asked = false; // queue of waiting callbacks, or false if no request is in flight
function openDb(connectionString, ready){
    // if we already have the instance, return it
    if(dbInstance !== null) return ready(null, dbInstance);
    // if it was already asked for by another caller, queue the callback
    // to avoid multiple requests that override each other
    if(asked) return asked.push(ready);
    asked = [];
    // openDbCode stands in for your driver's actual connect call
    openDbCode(connectionString, function(err, result){
        if(err) {
            ready(err, null); // error case
            asked.forEach(function(fn){ fn(err, null); }); // notify waiters of failure
            return;
        }
        dbInstance = result; // save the reference
        ready(null, dbInstance); // answer the first asker
        asked.forEach(function(fn){ fn(null, dbInstance); }); // notify all waiters
    });
}
This function effectively opens the db once for the first asker and then calls everyone back with the same instance. Note that it will use the first connection string provided.
module.exports = function(db) {
    return function(req, res, next) {
        openDb(db, function(err, db){
            if(err) return handleDbOpenErrorLogicHere(err);
            // middleware stuff, same db available here, call next to continue
        });
    };
};
Use an async function to wrap your create-app code, then call app.listen after everything is initialized.
// app.js
import express from "express";

export default async () => {
    const app = express();
    const asyncMiddleware = await initMWAsync();
    app.use(asyncMiddleware);
    return app;
};
// entry point of your program
import createApp from "./app";

const server = createApp()
    .then(app => app.listen(app.get("port"), () => {
        console.log(
            "  App is running at http://localhost:%d in %s mode",
            app.get("port"),
            app.get("env")
        );
        console.log("  Press CTRL-C to stop\n");
    }));

export default server;
I am using Serverless on AWS with Node.js, and MongoDB Atlas as the database.
At the moment I am using the trial tier, which allows a maximum of 500 connections.
It seems that my code is not disconnecting from the database when the process ends.
I am using Express to manage the routes.
At first I did not close the connection at all, thinking it would be closed automatically once the process ended, but no: I ended up with a lot of open connections.
Then I added a middleware to close my connections after the response had been sent, but that did not work either; I assume serverless was freezing the process once the response was sent.
Now on each route I am closing the mongo connection, for example:
router.get('/website/:id/page', async (req, res, next) => {
try {
const pages = await pageDataProvider.findByWebsite(req.params.id);
await mongodbDataProvider.close();
res.json(pages);
} catch (error) {
next(error)
}
})
This is how I handle connections with mongo
const MongoClient = require('mongodb').MongoClient
const config = require('../config')
const MONGODB_URI = config.stage === 'test' ?
    global.__MONGO_URI__ :
    `mongodb+srv://${config.mongodb.username}:${config.mongodb.password}@${config.mongodb.host}/admin?retryWrites=true&w=majority`;
const client = new MongoClient(MONGODB_URI);
let cachedDb = null;
module.exports.connect = async () => {
if (cachedDb) return cachedDb;
await client.connect();
const dbName = config.stage === 'test' ? global.__MONGO_DB_NAME__ : config.stage;
const db = client.db(dbName)
cachedDb = db;
return db;
}
module.exports.close = async () => {
if (!cachedDb) return;
await client.close();
cachedDb = null;
}
I do not understand why I have so many connections open
Step 1
Isolate the call to the MongoClient.connect() function into its own module so that the connections can be reused across functions. Let's create a file mongo-client.js for that:
mongo-client.js:
const { MongoClient } = require('mongodb');
// Export a module-scoped MongoClient promise. By doing this in a separate
// module, the client can be shared across functions.
const client = new MongoClient(process.env.MONGODB_URI);
module.exports = client.connect();
Step 2
Import the new module and use it in function handlers to connect to database.
some-file.js:
const clientPromise = require('./mongo-client');
// Handler
module.exports.handler = async function(event, context) {
// Get the MongoClient by calling await on the connection promise. Because
// this is a promise, it will only resolve once.
const client = await clientPromise;
// Use the connection to return the name of the connected database for example.
return client.db().databaseName;
}
I think it's a programming error in your close method. Please have a closer look at
if (!cachedDb) return;
I think it should have been
if (cachedDb != null) return;
As stated in the other response, I would strongly advise against closing the DB connection with each request. You should be looking at a pooling mechanism, where a connection from the pool is handed to your application; the application can wait until it receives one.
Closing the DB connections should be handled when the application is exiting (shutting down). That way the application will at least try to close its connections gracefully.
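For instance, a small graceful-shutdown sketch (reusing the dbProvider module shown below):
// close pooled connections when the process is asked to exit
const dbProvider = require('./dbProvider');

function shutdown() {
    dbProvider.close()
        .then(() => process.exit(0))
        .catch(() => process.exit(1));
}

process.on('SIGINT', shutdown);  // Ctrl-C
process.on('SIGTERM', shutdown); // container/platform stop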
Nonetheless, here is an adaptation of your program:
index.js
const express = require('express')
const app = express()
const port = 3000
const dbProvider = require('./dbProvider');
dbProvider.connect();
app.get('/testConnection', async (req, res, next) => {
    console.log('Doing something for fetching the request & closing connection');
    await dbProvider.close();
    console.log('After closing the connection');
    res.send('done');
})
app.listen(port, () => {
console.log(`Example app listening at http://localhost:${port}`)
})
dbProvider.js
let cachedDb = null;
let db = {};
module.exports.connect = async () => {
if (cachedDb) {
console.log('Returning Cachedb');
return cachedDb;
}
else{
console.log('Not a cachedDB');
}
db.setup = 1;
return db;
}
module.exports.close = async () => {
if (!cachedDb) {
console.log('No cachedDb, so there is no connection to close');
return;
}
db=null;
return;
}
And here is the console output:
-> node index.js
Not a cachedDB
Example app listening at http://localhost:3000
Doing something for fetching the request & closing connection
No cachedDb, so there is no connection to close
After closing the connection
According to this: https://docs.atlas.mongodb.com/best-practices-connecting-from-aws-lambda/
It's a good idea to add this line so you keep your connection pool between requests.
context.callbackWaitsForEmptyEventLoop = false;
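For example, set it at the top of the handler before touching the database; a sketch reusing the cached connect helper from the question (the module path and collection name are illustrative):
const mongodbDataProvider = require('./mongodbDataProvider'); // the cached-connection module above

module.exports.handler = async (event, context) => {
    // Let Lambda freeze the container without waiting for the open MongoDB
    // sockets, so the cached connection survives across invocations.
    context.callbackWaitsForEmptyEventLoop = false;

    const db = await mongodbDataProvider.connect();
    return db.collection('pages').countDocuments();
};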
I have an express app with a few endpoints and am currently testing it using mocha, chai, and chai-http. This was working fine until I added logic for a pooled mongo connection, and started building endpoints that depended on a DB connection. Basically, before I import my API routes and start the app, I want to make sure I'm connected to mongo.
My problem is that I'm having trouble understanding how I can export my app for chai-http but also make sure there is a DB connection before testing any endpoints.
Here, I am connecting to mongo, then in a callback applying my API and starting the app. The problem with this example is that my tests will start before a connection to the database is made, and before any endpoints are defined. I could move app.listen and api(app) outside of the MongoPool.connect() callback, but then I still have the problem of there being no DB connection when tests are running, so my endpoints will fail.
server.js
import express from 'express';
import api from './api';
import MongoPool from './lib/MongoPool';
let app = express();
let port = process.env.PORT || 3000;
MongoPool.connect((err, success) => {
if (err) throw err;
if (success) {
console.log("Connected to db.")
// apply express router endpoints to app
api(app);
app.listen(port, () => {
console.log(`App listening on port ${port}`);
})
} else {
throw "Couldnt connect to db";
}
})
export default app;
How can I test my endpoints using chai-http while making sure there is a pooled connection before tests are actually executed? It feels dirty writing my application in a way that conforms to the tests I'm using. Is this a design problem with my pool implementation? Is there a better way to test my endpoints with chai-http?
Here is the test I'm running
test.js
let chai = require('chai');
let chaiHttp = require('chai-http');
let server = require('../server').default;
let should = chai.should();
chai.use(chaiHttp);
//Our parent block
describe('Forecast', () => {
/*
* Test the /GET route
*/
describe('/GET forecast', () => {
it('it should GET the forecast', (done) => {
chai.request(server)
.get('/api/forecast?type=grid&lat=39.2667&long=-81.5615')
.end((err, res) => {
res.should.have.status(200);
done();
});
});
});
});
And this is the endpoint I'm testing
/api/forecast.js
import express from 'express';
import MongoPool from '../lib/MongoPool';
let router = express.Router();
let db = MongoPool.db();
router.get('/forecast', (req, res) => {
// do something with DB here
})
export default router;
Thank you for any help
After receiving some good feedback, I found this solution works best for me, based on Gomzy's answer and Vikash Singh's answer.
In server.js I'm connecting to the mongo pool, then emitting the 'ready' event on the express app. Then in the test, I can use before() to wait for 'ready' event to be emitted on the app. Once that happens, I'm good to start executing the test.
server.js
import express from 'express';
import bodyParser from 'body-parser';
import MongoPool from './lib/MongoPool';
let app = express();
let port = process.env.PORT || 5000;
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
(async () => {
await MongoPool.connect();
console.log("Connected to db.");
require('./api').default(app);
app.listen(port, () => {
console.log(`Listening on port ${port}.`)
app.emit("ready");
});
})();
export default app;
test.js
//Require the dev-dependencies
import chai from 'chai';
import chaiHttp from 'chai-http';
import server from '../src/server';
let should = chai.should();
chai.use(chaiHttp);
before(done => {
server.on("ready", () => {
done();
})
})
describe('Forecast', () => {
describe('/GET forecast', () => {
it('it should GET the forecast', (done) => {
chai.request(server)
.get('/api/forecast?type=grid&lat=39.2667&long=-81.5615')
.end((err, res) => {
res.should.have.status(200);
done();
});
});
});
});
The Express app is an instance of EventEmitter, so we can easily subscribe to events; i.e. the app can listen for the 'ready' event.
Your server.js file will look like below,
import express from 'express';
import api from './api';
import MongoPool from './lib/MongoPool';
let app = express();
let port = process.env.PORT || 3000;
app.on('ready', function() {
app.listen(port, function() {
console.log('app is ready');
});
});
MongoPool.connect((err, success) => {
if (err) throw err;
if (success) {
console.log('Connected to db.');
// apply express router endpoints to app
api(app);
// All OK - fire (emit) a ready event.
app.emit('ready');
} else {
throw 'Couldnt connect to db';
}
});
export default app;
Just create a function like the one below that connects to mongo and returns a promise, then use await to wait for it to connect before continuing. The function could look like this:
function dbconnect(){
    return new Promise(function(resolve, reject){
        MongoPool.connect((err, success) => {
            if (err) return reject(err);
            if (success) {
                resolve({ 'status': true });
            } else {
                reject(new Error('could not connect to db'));
            }
        });
    });
}
And then, use
await dbconnect();
api(app);
app.listen(port, () => {
    console.log(`App listening on port ${port}`);
});
Now the await line will wait for the function to connect to the DB before continuing; it returns success, or throws an error in case of failure.
This is a kind of solution you can use, but I would not recommend it. What we actually do is:
Create services and use those services in routes; don't write DB code directly in the routes.
And:
While writing tests for routes, mock/stub those services, and test the services separately in other test cases where you just pass in a DB object. The services add functions on top of that DB object, so in your tests you can connect to a DB and pass that object to the services to test their functions. This has an additional benefit: if you want to use a dummy/test DB for testing, you can set that up in the test cases.
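A rough sketch of that separation (all names here are illustrative):
// services/forecastService.js - pure logic; the db object is passed in
module.exports = (db) => ({
    getForecast: (query) => db.collection('forecasts').findOne(query)
});

// in a route test you can stub the db instead of connecting:
const fakeDb = {
    collection: () => ({ findOne: async () => ({ temp: 72 }) })
};
const service = require('../services/forecastService')(fakeDb);
// service.getForecast(...) now resolves without any real database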
Use the before hook in your tests like below:
describe('Forecast', () => {
    before(function(done){
        checkMongoPool(done); // this function should wait and ensure the mongo connection is established
    });
    it('/GET forecast', function(done){
        // write test code here ...
    });
});
And you can check the mongodb connection with the methods below:
Method 1: just check the readyState property -
mongoose.connection.readyState == 0; // not connected
mongoose.connection.readyState == 1; // connected
Method 2: use events
mongoose.connection.on('connected', function(){});
mongoose.connection.on('error', function(){});
mongoose.connection.on('disconnected', function(){});
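Putting those together, one possible checkMongoPool for the before hook (a sketch, assuming mongoose):
const mongoose = require('mongoose');

function checkMongoPool(done) {
    // already connected: continue immediately
    if (mongoose.connection.readyState === 1) return done();
    // otherwise wait for the connection (or fail the suite on error)
    mongoose.connection.once('connected', () => done());
    mongoose.connection.once('error', (err) => done(err));
}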
You can use a running server instead of an Express instance.
Start your server on a private port, then run the tests against the running server.
ex: PORT=9876 node server.js
In your test block, use chai.request('http://localhost:9876') (replace with your protocol, server IP...) instead of chai.request(server), as shown below.
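The test from the question would then look like:
describe('/GET forecast', () => {
    it('it should GET the forecast', (done) => {
        chai.request('http://localhost:9876') // the already-running server
            .get('/api/forecast?type=grid&lat=39.2667&long=-81.5615')
            .end((err, res) => {
                res.should.have.status(200);
                done();
            });
    });
});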
If you're using the native mongodb client you could implement a reusable pool like:
MongoPool.js
// The driver maintains a pool internally (default size 5).
// This stores a promise of the connected client; you can add a few
// lines to expose a db handle too if you wish.
const { MongoClient } = require('mongodb');
const url = 'mongodb://localhost:27017'; // your connection string

let connection;

module.exports.getConnection = () => {
    connection = new MongoClient(url).connect();
};

module.exports.getClient = () => connection;
Now in your test you could,
const { getConnection } = require('./MongoPool')
...
describe('Forecast', () => {
// get client connection
getConnection()
...
In your route:
...
const { getClient } = require('./MongoPool')
router.get('/forecast', (req, res) => {
    // if you made sure you called getConnection() elsewhere in your code,
    // getClient() returns a promise that resolves to the mongodb client
    // (which carries the connection pool)
    getClient()
        .then(client => {
            // do something with DB here, e.g. client.db('db-name')...
        })
        .catch(err => { /* handle connection errors */ })
})
I'll preface this by saying I am new to nodejs in general. Coming from the world of C#, it's a completely different way of thinking for me.
I've gone through a few courses and I'm setting up a little website as sort of a test for myself. And I'm failing!
I'm using socket.io with node, and I'm trying to broadcast a message with the emitter once in a while. I don't care about specific socket points right now (although I will in the future), so the emitter for this should go out to everyone.
I am having trouble accessing the io object from other modules.
I'll post my server.js file, as well as app/index.js, socket/index.js, helpers/index.js and api/index.js. I hope that posting these will show how it's supposed to work.
Ideally, I'd like to keep all socket-related items in the socket module, for consistency. Right now, I'm trying to get a method to run in the helpers module, but ideally the socket module would be better.
Anyway, server.js:
'use strict';
const express = require('express');
const app = express();
const cryptometers = require('./app');
const api = require('./app/api');
const fs = require('fs');
const sources = require('./app/api/sources.json');
app.set('port', process.env.PORT || 3000);
app.set('view engine', 'ejs');
app.use(express.static('public'));
app.use(cryptometers.session);
app.use('/', cryptometers.router);
cryptometers.ioServer(app).listen(app.get('port'), () =>{
console.log('app listening on port ' + app.get('port'));
api.getData(sources[0].source, sources[0].url, app);
setInterval(function(){api.getData(sources[0].source, sources[0].url, app)}, 60000);
});
Standard fare here. I just added a data retriever that calls an API once every minute and updates the database.
app/index.js:
'use strict';
const config = require('./config');
// create an IO server instance
let ioServer = app => {
app.locals.globalMarketCap = [];
const server = require('http').Server(app);
const io = require('socket.io')(server);
io.set('transports', ['websocket']);
io.use((socket, next) => {
require('./session')(socket.request, {}, next);
});
require('./socket')(io,app);
return server;
}
// turns router into a module
module.exports = {
router: require('./routes')(),
session: require('./session'),
ioServer,
}
Here I'm initializing socket.io, binding it to the app. It's also where I initialize a local storage array of data. (Is this a good spot to do this??)
socket/index.js:
'use strict';
const h = require('../helpers');
module.exports = (io, app) => {
io.of('/').on('connection', socket =>{
console.log('socket.io connected to client');
if(app.locals.globalMarketCap){
socket.emit('globalMarketCap', JSON.stringify(app.locals.globalMarketCap));
}
})
}
Here I'm responding to connection events, and pushing out the array of data that I defined in the last file above. Again, ideally I'd like all socket type stuff to stay in here.
helpers/index.js:
'use strict';
const router = require('express').Router();
const db = require('../db');
// iterate through the routes object and mount the routes
let _registerRoutes = (routes, method) => {
for(let key in routes){
if(typeof routes[key] === 'object' && routes[key] !== null && !(routes[key] instanceof Array)){
_registerRoutes(routes[key], key);
} else {
// Register the routes
if(method === 'get'){
router.get(key, routes[key]);
} else if(method === 'post'){
router.post(key, routes[key]);
} else {
router.use(routes[key]);
}
}
}
}
let route = routes => {
_registerRoutes(routes);
return router;
}
let updateGlobalMarketCap = (app) =>{
//app.io.socket.emit('globalMarketCap', JSON.stringify(app.locals.globalMarketCap))
}
module.exports = {
route,
updateGlobalMarketCap
}
The commented out line for updateGlobalMarketCap is where my pain is. Trying to get access to the io object there.
api/index.js
'use strict';
const axios = require("axios");
const db = require("../db");
const h = require("../helpers");
let getData = (source, url, app, cryptoMeters) => {
axios
.get(url)
.then(response => {
//console.log(response.data);
response.data["source"] = source;
var data = new db.globalMarketCapModel(response.data);
app.locals.globalMarketCap = response.data;
var query = { source: source};
db.globalMarketCapModel.findOne({
"source":source
}, 'source old_total_market_cap_usd total_market_cap_usd', function(err, market) {
if (market) {
if(market.old_total_market_cap_usd != response.data.total_market_cap_usd
&& market.total_market_cap_usd != response.data.total_market_cap_usd){
response.data["old_total_market_cap_usd"] = market.total_market_cap_usd;
h.updateGlobalMarketCap(app);
}
db.globalMarketCapModel.update(query, response.data, function (err) {
if (err) {
console.log("uhoh")
} else {
return true;
}
});
} else {
data.save(function (err) {
if (err) {
console.log("uhoh")
} else {
return true;
}
})
}
})
return true;
})
.catch(error => {
console.log(error);
return false;
});
}
module.exports = {
getData
}
The getData function here is where a call to the update emitter would take place.
I've considered using standard node event emitters as a solution to my problem, but that might be gumming up the works and there's a simpler answer.
Anyway, thanks for reading, and I'm interested in any commentary on what I've written so far: pitfalls, mistakes, etc. Learning here! :)
There are many different ways to organize your code to accomplish sharing of the io object. Here's one such scheme. You break out your socket.io initialization code into its own module. You give that module two main features:
A constructor function (that you pass the server to) that allows socket.io to initialize itself on your server.
A method to get the io instance after it's been initialized.
This will allow any other code in your project that wants to get access to the io object to do something like this:
const io = require('./io.js').getIO();
Here's how that io module could be structured:
// io.js
// singleton instance of socket.io that is stored here after the
// constructor function is called
let ioInstance;
module.exports = function(server) {
const io = require('socket.io')(server);
io.set('transports', ['websocket']);
io.use((socket, next) => {
require('./session')(socket.request, {}, next);
});
// save in higher scope so it can be obtained later
ioInstance = io;
return io;
}
// this getIO method is designed for subsequent
// sharing of the io instance with other modules once the module has been initialized
// other modules can do: let io = require("./io.js").getIO();
module.exports.getIO = function() {
if (!ioInstance) {
throw new Error("Must call module constructor function before you can get the IO instance");
}
return ioInstance;
}
And, this module would be initialized like this:
const io = require('./io.js')(server);
Where you pass it your web server so it can hook to that. It has to be initialized like this before anyone can use .getIO() on it. The storage in the module of the ioInstance makes use of the module caching. The module initialization code is only run once. After that, the same exports are just returned each time which have access to the saved ioInstance inside the module.
I'm using the node-mongodb-native driver with MongoDB to write a website.
I have some questions about how to manage connections:
Is it enough to use only one MongoDB connection for all requests? Are there any performance issues? If not, can I set up a global connection to use in the whole application?
If not, is it good to open a new connection when a request arrives, and close it once the request is handled? Is it expensive to open and close a connection?
Should I use a global connection pool? I hear the driver has a native connection pool. Is it a good choice?
If I use a connection pool, how many connections should be used?
Are there other things I should notice?
The primary committer to node-mongodb-native says:
You open MongoClient.connect once when your app boots up and reuse the db object. It's not a singleton connection pool; each .connect creates a new connection pool.
So, to answer your question directly, reuse the db object that results from MongoClient.connect(). This gives you pooling, and will provide a noticeable speed increase as compared with opening/closing connections on each db action.
Open a new connection when the Node.js application starts, and reuse the existing db connection object:
/server.js
import express from 'express';
import Promise from 'bluebird';
import logger from 'winston';
import { MongoClient } from 'mongodb';
import config from './config';
import usersRestApi from './api/users';
const app = express();
app.use('/api/users', usersRestApi);
app.get('/', (req, res) => {
res.send('Hello World');
});
// Create a MongoDB connection pool and start the application
// after the database connection is ready
MongoClient.connect(config.database.url, { promiseLibrary: Promise }, (err, db) => {
if (err) {
logger.warn(`Failed to connect to the database. ${err.stack}`);
}
app.locals.db = db;
app.listen(config.port, () => {
logger.info(`Node.js app is listening at http://localhost:${config.port}`);
});
});
/api/users.js
import { Router } from 'express';
import { ObjectID } from 'mongodb';
const router = new Router();
router.get('/:id', async (req, res, next) => {
try {
const db = req.app.locals.db;
const id = new ObjectID(req.params.id);
const user = await db.collection('user').findOne({ _id: id }, {
email: 1,
firstName: 1,
lastName: 1
});
if (user) {
user.id = req.params.id;
res.send(user);
} else {
res.sendStatus(404);
}
} catch (err) {
next(err);
}
});
export default router;
Source: How to Open Database Connections in a Node.js/Express App
Here is some code that will manage your MongoDB connections.
var MongoClient = require('mongodb').MongoClient;
var url = require("../config.json")["MongoDBURL"]
var option = {
db:{
numberOfRetries : 5
},
server: {
auto_reconnect: true,
poolSize : 40,
socketOptions: {
connectTimeoutMS: 500
}
},
replSet: {},
mongos: {}
};
function MongoPool(){}
var p_db;
function initPool(cb){
MongoClient.connect(url, option, function(err, db) {
if (err) throw err;
p_db = db;
if(cb && typeof(cb) == 'function')
cb(p_db);
});
return MongoPool;
}
MongoPool.initPool = initPool;
function getInstance(cb){
if(!p_db){
initPool(cb)
}
else{
if(cb && typeof(cb) == 'function')
cb(p_db);
}
}
MongoPool.getInstance = getInstance;
module.exports = MongoPool;
When you start the server, call initPool
require("mongo-pool").initPool();
Then in any other module you can do the following:
var MongoPool = require("mongo-pool");
MongoPool.getInstance(function (db){
// Query your MongoDB database.
});
This is based on MongoDB documentation. Take a look at it.
Manage mongo connection pools in a single self-contained module. This approach provides two benefits. Firstly, it keeps your code modular and easier to test. Secondly, you're not forced to mix your database connection into your request object, which is NOT the place for a database connection object. (Given the nature of JavaScript, I would consider it highly dangerous to mix anything into an object constructed by library code.) With that, you only need to consider a module that exports two methods: connect = () => Promise and get = () => dbConnectionObject.
With such a module you can firstly connect to the database
// runs in boot.js or what ever file your application starts with
const db = require('./myAwesomeDbModule');
db.connect()
.then(() => console.log('database connected'))
.then(() => bootMyApplication())
.catch((e) => {
console.error(e);
// Always hard exit on a database connection error
process.exit(1);
});
When in flight your app can simply call get() when it needs a DB connection.
const db = require('./myAwesomeDbModule');
db.get().find(...)... // I have excluded code here to keep the example simple
If you set up your db module in the same way as the following not only will you have a way to ensure that your application will not boot unless you have a database connection you also have a global way of accessing your database connection pool that will error if you have not got a connection.
// myAwesomeDbModule.js
const { MongoClient } = require('mongodb');

let connection = null;

// url and option are assumed to come from your config
module.exports.connect = () => new Promise((resolve, reject) => {
    MongoClient.connect(url, option, function(err, db) {
        if (err) { reject(err); return; }
        connection = db;
        resolve(db);
    });
});

module.exports.get = () => {
    if (!connection) {
        throw new Error('Call connect first!');
    }
    return connection;
};
If you have Express.js, you can use express-mongo-db for caching and sharing the MongoDB connection between requests without a pool (since the accepted answer says it is the right way to share the connection).
If not - you can look at its source code and use it in another framework.
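For reference, usage is roughly like this (as I understand its README, the middleware caches the connection and exposes it on req.db):
const express = require('express');
const expressMongoDb = require('express-mongo-db');

const app = express();
app.use(expressMongoDb('mongodb://localhost/test'));

app.get('/', (req, res, next) => {
    // req.db is the shared connection provided by the middleware
    req.db.collection('users').find().toArray((err, users) => {
        if (err) return next(err);
        res.json(users);
    });
});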
You should create the connection as a service, then reuse it when needed.
// db.service.js
import { MongoClient } from "mongodb";
import database from "../config/database";
const dbService = {
    db: undefined,
    connect: callback => {
        MongoClient.connect(database.uri, function(err, data) {
            if (err) {
                return callback(err); // nothing to close yet; just report the error
            }
            dbService.db = data;
            console.log("Connected to database");
            callback(null);
        });
    }
};
export default dbService;
my App.js sample
// App Start
dbService.connect(err => {
if (err) {
console.log("Error: ", err);
process.exit(1);
}
server.listen(config.port, () => {
console.log(`Api runnning at ${config.port}`);
});
});
and use it wherever you want with
import dbService from "db.service.js"
const db = dbService.db
I have been using generic-pool with redis connections in my app, and I highly recommend it. It's generic, and I definitely know it works with mysql, so I don't think you'll have any problems with it and mongo:
https://github.com/coopernurse/node-pool
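I have not used it with mongo myself, but a sketch with the generic-pool v3 API might look like this (connection string and pool sizes are illustrative):
const genericPool = require('generic-pool');
const { MongoClient } = require('mongodb');

// the factory tells the pool how to create and destroy resources
const pool = genericPool.createPool({
    create: () => new MongoClient('mongodb://localhost:27017').connect(),
    destroy: (client) => client.close()
}, { min: 2, max: 10 });

// acquire a client, use it, and always release it back to the pool
async function countUsers() {
    const client = await pool.acquire();
    try {
        return await client.db('test').collection('users').countDocuments();
    } finally {
        await pool.release(client);
    }
}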
I have implemented the code below in my project for connection pooling; it creates a minimum number of connections and reuses available ones.
/* Mongo.js*/
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/yourdatabasename";
var assert = require('assert');
var connection = null;

// Create the database connection
var establishConnection = function(callback){
    MongoClient.connect(url, { poolSize: 10 }, function(err, db) {
        assert.equal(null, err);
        connection = db;
        if (typeof callback === 'function')
            callback(connection);
    });
};
function getconnection(){
return connection
}
module.exports = {
establishConnection:establishConnection,
getconnection:getconnection
}
/*app.js*/
// establish one connection that all other routes will use.
var db = require('./routes/mongo')
db.establishConnection();
//you can also call with callback if you wanna create any collection at starting
/*
db.establishConnection(function(conn){
conn.createCollection("collectionName", function(err, res) {
if (err) throw err;
console.log("Collection created!");
});
};
*/
// anyother route.js
var db = require('./mongo')
router.get('/', function(req, res, next) {
var connection = db.getconnection()
res.send("Hello");
});
If you are using Express, there is another, more straightforward method: utilise Express's built-in feature for sharing data between routes and modules within your app. There is an object called app.locals. We can attach properties to it and access it from inside our routes. To use it, instantiate your mongo connection in your app.js file.
var app = express();
MongoClient.connect('mongodb://localhost:27017/')
.then(client =>{
const db = client.db('your-db');
const collection = db.collection('your-collection');
app.locals.collection = collection;
});
// view engine setup
app.set('views', path.join(__dirname, 'views'));
This database connection, or indeed any other data you wish to share around the modules of your app, can now be accessed within your routes with req.app.locals as below, without the need for creating and requiring additional modules.
app.get('/', (req, res) => {
const collection = req.app.locals.collection;
collection.find({}).toArray()
.then(response => res.status(200).json(response))
.catch(error => console.error(error));
});
This method ensures that you have a database connection open for the duration of your app, unless you choose to close it at any time. It's easily accessible with req.app.locals.yourCollection and doesn't require the creation of any additional modules.
The best approach to implementing connection pooling is to create one global array variable that holds the db name along with the connection object returned by MongoClient, and to reuse that connection whenever you need to contact the database.
In your Server.js define global.dbconnections = [];
Create a service named connectionService.js. It will have 2 methods: getConnection and createConnection.
When a user calls getConnection(), it will look for the details in the global connection variable and return the connection details if they already exist; otherwise it will call createConnection() and return the new connection details.
Call this service with a <db_name> and it will return the connection object if it already has one, else it will create a new connection and return it to you.
Hope it helps :)
Here is the connectionService.js code:
var mongo = require('mongoskin');
var mongodb = require('mongodb');
var Q = require('q');
var service = {};
service.getConnection = getConnection ;
module.exports = service;
function getConnection(appDB){
    var deferred = Q.defer();
    var connectionDetails = global.dbconnections.find(item => item.appDB == appDB);

    if (connectionDetails) {
        deferred.resolve(connectionDetails.connection);
    } else {
        createConnection(appDB).then(function(connection){
            deferred.resolve(connection);
        });
    }
    return deferred.promise;
}

function createConnection(appDB){
    var deferred = Q.defer();
    // connectionServer holds your base connection string
    mongodb.MongoClient.connect(connectionServer + appDB, (err, database) => {
        if (err) return deferred.reject(err.name + ': ' + err.message);
        global.dbconnections.push({ appDB: appDB, connection: database });
        deferred.resolve(database);
    });
    return deferred.promise;
}
In case anyone wants something that works in 2021 with Typescript, here's what I'm using:
import { MongoClient, Collection } from "mongodb";
const FILE_DB_HOST = process.env.FILE_DB_HOST as string;
const FILE_DB_DATABASE = process.env.FILE_DB_DATABASE as string;
const FILES_COLLECTION = process.env.FILES_COLLECTION as string;
if (!FILE_DB_HOST || !FILE_DB_DATABASE || !FILES_COLLECTION) {
throw "Missing FILE_DB_HOST, FILE_DB_DATABASE, or FILES_COLLECTION environment variables.";
}
const client = new MongoClient(FILE_DB_HOST, {
useNewUrlParser: true,
useUnifiedTopology: true,
});
class Mongoose {
static FilesCollection: Collection;
static async init() {
const connection = await client.connect();
const FileDB = connection.db(FILE_DB_DATABASE);
Mongoose.FilesCollection = FileDB.collection(FILES_COLLECTION);
}
}
Mongoose.init();
export default Mongoose;
I believe if a request occurs too soon (before Mongoose.init() has time to finish), an error will be thrown, since Mongoose.FilesCollection will be undefined.
import { Request, Response, NextFunction } from "express";
import Mongoose from "../../mongoose";
export default async function GetFile(req: Request, res: Response, next: NextFunction) {
const files = Mongoose.FilesCollection;
const file = await files.findOne({ fileName: "hello" });
res.send(file);
}
For example, if you call files.findOne({ ... }) and Mongoose.FilesCollection is undefined, then you will get an error.
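One way to close that gap (a sketch; TypeScript annotations omitted) is to export the promise returned by Mongoose.init() and await it in each handler before touching the collection:
// in the mongoose module, keep the init promise instead of discarding it
export const ready = Mongoose.init();

// in the handler
import Mongoose, { ready } from "../../mongoose";

export default async function GetFile(req, res) {
    await ready; // FilesCollection is guaranteed to be assigned once this resolves
    const file = await Mongoose.FilesCollection.findOne({ fileName: "hello" });
    res.send(file);
}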
npm i express mongoose
mongodb.js
const express = require('express');
const mongoose =require('mongoose')
const app = express();
mongoose.set('strictQuery', true);
mongoose.connect('mongodb://localhost:27017/db_name', {
useNewUrlParser: true,
useUnifiedTopology: true
})
.then(() => console.log('MongoDB Connected...'))
.catch((err) => console.log(err))
app.listen(3000,()=>{ console.log("Started on port 3000 !!!") })
node mongodb.js
Using the method below you can easily manage as many connections as you need:
var mongoose = require('mongoose');
//Set up default mongoose connection
const bankDB = () => {
    return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
};
bankDB().then(() => console.log('Connected to mongoDB-Atlas bankApp...'))
    .catch((err) => console.error('Could not connect to mongoDB', err));

//Set up second mongoose connection
const myDB = () => {
    return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
};
myDB().then(() => console.log('Connected to mongoDB-Atlas connection 2...'))
    .catch((err) => console.error('Could not connect to mongoDB', err));

module.exports = { bankDB, myDB };
I have a db.js file in which I set up a MongoDB connection. I would like to export the database object into my main app.js file:
// db.js
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
module.exports = db;
});
// app.js
var db = require('./db');
app.get('/', function (req, res) {
db.collection(/* … */); // throws error
});
The error is:
TypeError: Object #<Object> has no method 'collection'
So, how can I export the db object properly?
The best option, as suggested in the comments by elclanrs, is to export a promise:
// database.js
var MongoClient = require('mongodb').MongoClient,
Q = require('q'),
connect = Q.nbind(MongoClient.connect, MongoClient);
var promise = connect(/* url */);
module.exports = {
connect: function () {
return promise;
}
}
// app.js
var database = require('./database');
database.connect()
.then(function (db) {
app.get('/', function (req, res) {
db.collection(/* … */);
});
})
.catch(function (err) {
console.log('Error connecting to DB:', err);
})
.done();
(I'm using awesome Q library here.)
Below's the old version of my answer, left for the sake of history (but if you don't want to use promises, instead of going that road, you should use Matt's answer).
Its downside is that it will open a new connection each time you require('./database.js') (bummer!)
// DO NOT USE: left for the sake of history
// database.js
var MongoClient = require('mongodb').MongoClient;
function connect(cb) {
MongoClient.connect(/* the URL */, cb);
}
module.exports = {
connect: connect
}
// app.js
var database = require('./database');
database.connect(function (err, db) {
app.get('/', function (req, res) {
db.collection(/* … */);
});
});
You can't do it as you want to do it, because, quoting the docs:
Note that assignment to module.exports must be done immediately. It cannot be done in any callbacks.
Instead however, you can assign a property of module.exports in a callback, therefore this will work;
// db.js
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
module.exports.instance = db;
});
// app.js
var db = require('./db');
// some time later (when `.instance` is available)
app.get('/', function (req, res) {
db.instance.collection(/* … */);
});
However, the "some time later" is a bit of a pain, so you may just want to use some sort of callback;
// db.js
var queue = [];
var instance = null;
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
instance = db;
while (queue.length) {
queue.pop()(instance);
}
});
module.exports.done = function (callback) {
if (instance === null) {
queue.push(callback);
} else {
callback(instance);
}
};
// app.js
require('./db').done(function (db) {
app.get('/', function (req, res) {
db.collection(/* … */);
});
});
The above also handles cases where handlers via done() are attached after the connection has already been made.
Servers typically have 3 phases: init, serve, and uninit. This seems obvious, but when you start writing servers from scratch (e.g. in Java you start by inheriting from HttpServlet) you sometimes forget how to do these things...
In the startup phase you must open the db connection (pool) and save the object somewhere (typically in your db.js module). Then in the service phase retrieve the mongodb connection from db.js.
Related: How to get an instance of the db from the node-mongo native driver?
In your code:
// db.js
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
module.exports = db;
});
// app.js
var db = require('./db');
app.get('/', function (req, res) {
db.collection(/* … */); // throws error
});
You've called connect in the db.js module, yet it's asynchronous. The call to require in app.js is synchronous in behavior, though, so it will always receive an undefined value (as the exports will not yet have been assigned when db.js finishes executing).
I'd suggest keeping things simple.
The option I usually use is something where the app code makes the connection and doesn't start listening for HTTP connections until it is complete. Then, I'll initialize each route file by calling a named method and pass the database connection to it.
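That first option might look like this (a sketch; file and collection names are illustrative):
// app.js - connect first, then hand the db to each route module
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
    if (err) throw err;
    require('./routes/users').init(app, db); // each route file exposes init(app, db)
    app.listen(3000);
});

// routes/users.js
exports.init = function (app, db) {
    app.get('/users', function (req, res) {
        db.collection('users').find().toArray(function (err, users) {
            res.json(users);
        });
    });
};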
Or, you could just always call connect in each module, yet cache the value. (The connect call would need to be called within the route callback code so that the routes were defined immediately and not when the connection was actually established).
// db.js
var _db = null;
module.exports = function(callback) {
if (!_db) {
_db = {}; // only one connection, so we'll stop others from trying
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
_db = db;
callback(err, db);
});
} else {
callback(null, _db);
}
};
// app.js
var db = require('./db');
db(function(err, connection) {
// store the connection value here and pass around, or ...
// call this always in each file ...
});
/// or ...
app.get('/', function (req, res) {
db(function(err, connection) {
connection.collection(/* … */);
});
});
Or, you could use MongooseJS (a wrapper around the native Node.js MongoDB driver), where commands are queued internally if the connection isn't available yet, as sketched below.
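A minimal sketch of that buffering behavior (assuming Mongoose's default settings):
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost:27017/test');

var User = mongoose.model('User', new mongoose.Schema({ name: String }));

// safe to call immediately: Mongoose buffers the command until
// the underlying connection is actually established
User.find({}, function (err, users) {
    console.log(users);
});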