I'm using the node-mongodb-native driver with MongoDB to write a website.
I have some questions about how to manage connections:
Is it enough to use only one MongoDB connection for all requests? Are there any performance issues? If not, can I set up a global connection to use in the whole application?
If not, is it good to open a new connection when a request arrives and close it when the request has been handled? Is it expensive to open and close a connection?
Should I use a global connection pool? I hear the driver has a native connection pool. Is it a good choice?
If I use a connection pool, how many connections should be used?
Are there other things I should notice?
The primary committer to node-mongodb-native says:
You open MongoClient.connect once when your app boots up and reuse
the db object. It's not a singleton connection pool; each .connect
creates a new connection pool.
So, to answer your question directly, reuse the db object that results from MongoClient.connect(). This gives you pooling, and will provide a noticeable speed increase as compared with opening/closing connections on each db action.
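A minimal sketch of that connect-once-and-reuse pattern (the URL, port, and collection name are placeholders, and the callback-style driver API shown matches the era of the quote):
var express = require('express');
var MongoClient = require('mongodb').MongoClient;

var app = express();
var db; // the single db object (and its internal pool) reused by every request

app.get('/users', function (req, res, next) {
  // same db object on every request; no per-request connect/close
  db.collection('users').find({}).toArray(function (err, users) {
    if (err) return next(err);
    res.json(users);
  });
});

MongoClient.connect('mongodb://localhost:27017/test', function (err, database) {
  if (err) throw err;
  db = database;

  // only start accepting requests once the pool is ready
  app.listen(3000);
});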
Open a new connection when the Node.js application starts, and reuse the existing db connection object:
/server.js
import express from 'express';
import Promise from 'bluebird';
import logger from 'winston';
import { MongoClient } from 'mongodb';
import config from './config';
import usersRestApi from './api/users';
const app = express();
app.use('/api/users', usersRestApi);
app.get('/', (req, res) => {
  res.send('Hello World');
});

// Create a MongoDB connection pool and start the application
// after the database connection is ready
MongoClient.connect(config.database.url, { promiseLibrary: Promise }, (err, db) => {
  if (err) {
    logger.warn(`Failed to connect to the database. ${err.stack}`);
  }
  app.locals.db = db;
  app.listen(config.port, () => {
    logger.info(`Node.js app is listening at http://localhost:${config.port}`);
  });
});
/api/users.js
import { Router } from 'express';
import { ObjectID } from 'mongodb';
const router = new Router();
router.get('/:id', async (req, res, next) => {
  try {
    const db = req.app.locals.db;
    const id = new ObjectID(req.params.id);
    const user = await db.collection('user').findOne({ _id: id }, {
      email: 1,
      firstName: 1,
      lastName: 1
    });
    if (user) {
      user.id = req.params.id;
      res.send(user);
    } else {
      res.sendStatus(404);
    }
  } catch (err) {
    next(err);
  }
});
export default router;
Source: How to Open Database Connections in a Node.js/Express App
Here is some code that will manage your MongoDB connections.
var MongoClient = require('mongodb').MongoClient;
var url = require("../config.json")["MongoDBURL"];

var option = {
  db: {
    numberOfRetries: 5
  },
  server: {
    auto_reconnect: true,
    poolSize: 40,
    socketOptions: {
      connectTimeoutMS: 500
    }
  },
  replSet: {},
  mongos: {}
};
function MongoPool() {}

var p_db;

function initPool(cb) {
  MongoClient.connect(url, option, function (err, db) {
    if (err) throw err;

    p_db = db;
    if (cb && typeof(cb) == 'function')
      cb(p_db);
  });
  return MongoPool;
}

MongoPool.initPool = initPool;

function getInstance(cb) {
  if (!p_db) {
    initPool(cb);
  } else {
    if (cb && typeof(cb) == 'function')
      cb(p_db);
  }
}

MongoPool.getInstance = getInstance;

module.exports = MongoPool;
When you start the server, call initPool
require("mongo-pool").initPool();
Then in any other module you can do the following:
var MongoPool = require("mongo-pool");
MongoPool.getInstance(function (db){
// Query your MongoDB database.
});
This is based on MongoDB documentation. Take a look at it.
Manage the Mongo connection pool in a single, self-contained module. This approach provides two benefits. Firstly, it keeps your code modular and easier to test. Secondly, you are not forced to mix your database connection into your request object, which is NOT the place for a database connection object. (Given the nature of JavaScript, I would consider it highly dangerous to mix anything into an object constructed by library code.) So consider a module that exports two methods: connect = () => Promise and get = () => dbConnectionObject.
With such a module you can first connect to the database:
// runs in boot.js or whatever file your application starts with
const db = require('./myAwesomeDbModule');

db.connect()
  .then(() => console.log('database connected'))
  .then(() => bootMyApplication())
  .catch((e) => {
    console.error(e);
    // Always hard exit on a database connection error
    process.exit(1);
  });
When in flight your app can simply call get() when it needs a DB connection.
const db = require('./myAwesomeDbModule');
db.get().find(...)... // I have excluded code here to keep the example simple
If you set up your db module as follows, you not only have a way to ensure that your application will not boot unless you have a database connection, you also have a global way of accessing your database connection pool that will throw an error if you have not yet got a connection.
// myAwesomeDbModule.js
const { MongoClient } = require('mongodb');

// `url` and `option` are assumed to be defined elsewhere (e.g. in your config)
let connection = null;

module.exports.connect = () => new Promise((resolve, reject) => {
  MongoClient.connect(url, option, function (err, db) {
    if (err) { reject(err); return; }
    connection = db;
    resolve(db);
  });
});

module.exports.get = () => {
  if (!connection) {
    throw new Error('Call connect first!');
  }
  return connection;
}
If you are using Express.js, you can use express-mongo-db to cache and share the MongoDB connection between requests without managing a pool yourself (the accepted answer says sharing the connection is the right way).
If not, you can look at its source code and use it in another framework.
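For the Express case, a short usage sketch, assuming (per the middleware's README) that it exposes the shared connection as req.db; the URL, route, and collection name here are placeholders:
var express = require('express');
var expressMongoDb = require('express-mongo-db');

var app = express();

// connects once and reuses the same connection object for every request
app.use(expressMongoDb('mongodb://localhost:27017/test'));

app.get('/users', function (req, res, next) {
  req.db.collection('users').find({}).toArray(function (err, users) {
    if (err) return next(err);
    res.json(users);
  });
});

app.listen(3000);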
You should create the connection as a service and then reuse it whenever needed.
// db.service.js
import { MongoClient } from "mongodb";
import database from "../config/database";

const dbService = {
  db: undefined,
  connect: callback => {
    MongoClient.connect(database.uri, function (err, data) {
      if (err) {
        // bail out here so we don't overwrite dbService.db with undefined
        callback(err);
        return;
      }
      dbService.db = data;
      console.log("Connected to database");
      callback(null);
    });
  }
};

export default dbService;
My App.js sample:
// App start
dbService.connect(err => {
  if (err) {
    console.log("Error: ", err);
    process.exit(1);
  }

  server.listen(config.port, () => {
    console.log(`Api running at ${config.port}`);
  });
});
and use it wherever you want with
import dbService from "./db.service.js";
const db = dbService.db
I have been using generic-pool with Redis connections in my app, and I highly recommend it. It's generic, and I know for certain it works with MySQL, so I don't think you'll have any problems with it and Mongo.
https://github.com/coopernurse/node-pool
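The MongoDB driver already pools internally, but if you want an explicit pool anyway, here is a hedged sketch using the generic-pool v3 promise API; the URL, pool sizes, and database/collection names are placeholders, and it assumes a 3.x MongoDB driver where connect() resolves to a client:
var genericPool = require('generic-pool');
var MongoClient = require('mongodb').MongoClient;

var factory = {
  create: function () {
    return MongoClient.connect('mongodb://localhost:27017');
  },
  destroy: function (client) {
    return client.close();
  }
};

var pool = genericPool.createPool(factory, { min: 2, max: 10 });

// acquire a client, use it, and always release it back to the pool
pool.acquire()
  .then(function (client) {
    return client.db('test').collection('users').findOne({})
      .then(function (doc) {
        console.log(doc);
        return pool.release(client);
      })
      .catch(function (err) {
        pool.release(client);
        throw err;
      });
  })
  .catch(console.error);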
I have implemented the code below in my project to set up connection pooling, so it creates a minimum number of connections and reuses the available connection.
/* mongo.js */
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/yourdatabasename";
var assert = require('assert');

var connection = [];

// Create the database connection
establishConnection = function (callback) {
  MongoClient.connect(url, { poolSize: 10 }, function (err, db) {
    assert.equal(null, err);
    connection = db;
    if (typeof callback === 'function')
      callback(connection);
  });
}

function getconnection() {
  return connection;
}

module.exports = {
  establishConnection: establishConnection,
  getconnection: getconnection
}
/* app.js */
// establish the one connection that all other routes will use.
var db = require('./routes/mongo');

db.establishConnection();

// you can also call it with a callback if you want to create any collection at startup
/*
db.establishConnection(function (conn) {
  conn.createCollection("collectionName", function (err, res) {
    if (err) throw err;
    console.log("Collection created!");
  });
});
*/

// any other route.js
var db = require('./mongo');

router.get('/', function (req, res, next) {
  var connection = db.getconnection();
  res.send("Hello");
});
If you are using Express, there is another, more straightforward method: use Express's built-in feature for sharing data between routes and modules within your app, the app.locals object. We can attach properties to it and access them from inside our routes. To use it, instantiate your Mongo connection in your app.js file.
var app = express();

MongoClient.connect('mongodb://localhost:27017/')
  .then(client => {
    const db = client.db('your-db');
    const collection = db.collection('your-collection');
    app.locals.collection = collection;
  });

// view engine setup
app.set('views', path.join(__dirname, 'views'));
This database connection, or indeed any other data you wish to share around the modules of your app, can now be accessed within your routes with req.app.locals as below, without the need to create and require additional modules.
app.get('/', (req, res) => {
  const collection = req.app.locals.collection;
  collection.find({}).toArray()
    .then(response => res.status(200).json(response))
    .catch(error => console.error(error));
});
This method ensures that you have a database connection open for the duration of your app unless you choose to close it at any time. It's easily accessible with req.app.locals.your-collection and doesn't require creation of any additional modules.
The best approach to implement connection pooling is to create one global array variable that holds the db name along with the connection object returned by MongoClient, and then reuse that connection whenever you need to contact the database.
In your Server.js, define global.dbconnections = [];
Create a service named connectionService.js. It will have two methods: getConnection and createConnection.
When getConnection() is called, it will look for the details in the global connection variable and return the connection details if they already exist; otherwise it will call createConnection() and return the connection details.
Call this service with a <db_name> and it will return the connection object if it already has one; otherwise it will create a new connection and return it to you.
Hope it helps :)
Here is the connectionService.js code:
var mongo = require('mongoskin');
var mongodb = require('mongodb');
var Q = require('q');
var service = {};

service.getConnection = getConnection;

module.exports = service;

function getConnection(appDB) {
  var deferred = Q.defer();
  var connectionDetails = global.dbconnections.find(item => item.appDB == appDB);

  if (connectionDetails) {
    deferred.resolve(connectionDetails.connection);
  } else {
    createConnection(appDB).then(function (connectionDetails) {
      deferred.resolve(connectionDetails);
    });
  }

  return deferred.promise;
}

function createConnection(appDB) {
  var deferred = Q.defer();

  mongodb.MongoClient.connect(connectionServer + appDB, (err, database) => {
    if (err) deferred.reject(err.name + ': ' + err.message);
    global.dbconnections.push({ appDB: appDB, connection: database });
    deferred.resolve(database);
  });

  return deferred.promise;
}
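A hedged usage sketch of the service from a route (the service path, route, database name 'myAppDB', and collection name are all placeholders; it assumes getConnection resolves with the db object pushed in createConnection):
var router = require('express').Router();
var connectionService = require('./connectionService');

router.get('/report', function (req, res, next) {
  // returns the cached connection for 'myAppDB', or creates it on first use
  connectionService.getConnection('myAppDB')
    .then(function (db) {
      db.collection('reports').find({}).toArray(function (err, reports) {
        if (err) return next(err);
        res.json(reports);
      });
    })
    .catch(next);
});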
In case anyone wants something that works in 2021 with TypeScript, here's what I'm using:
import { MongoClient, Collection } from "mongodb";

const FILE_DB_HOST = process.env.FILE_DB_HOST as string;
const FILE_DB_DATABASE = process.env.FILE_DB_DATABASE as string;
const FILES_COLLECTION = process.env.FILES_COLLECTION as string;

if (!FILE_DB_HOST || !FILE_DB_DATABASE || !FILES_COLLECTION) {
  throw "Missing FILE_DB_HOST, FILE_DB_DATABASE, or FILES_COLLECTION environment variables.";
}

const client = new MongoClient(FILE_DB_HOST, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

class Mongoose {
  static FilesCollection: Collection;

  static async init() {
    const connection = await client.connect();
    const FileDB = connection.db(FILE_DB_DATABASE);
    Mongoose.FilesCollection = FileDB.collection(FILES_COLLECTION);
  }
}

Mongoose.init();

export default Mongoose;
I believe if a request occurs too soon (before Mongoose.init() has had time to finish), an error will be thrown, since Mongoose.FilesCollection will be undefined.
import { Request, Response, NextFunction } from "express";
import Mongoose from "../../mongoose";

export default async function GetFile(req: Request, res: Response, next: NextFunction) {
  const files = Mongoose.FilesCollection;
  const file = await files.findOne({ fileName: "hello" });
  res.send(file);
}
For example, if you call files.findOne({ ... }) and Mongoose.FilesCollection is undefined, then you will get an error.
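One way to avoid that race, sketched below with hypothetical file names, port, and route wiring, is to wait for init() to resolve before the server starts listening:
// server.js (sketch): only start listening once the collection handle exists
import express from "express";
import Mongoose from "./mongoose";
import GetFile from "./routes/files/GetFile";

const app = express();
app.get("/files", GetFile);

Mongoose.init()
  .then(() => {
    app.listen(3000, () => console.log("Listening on 3000"));
  })
  .catch((err) => {
    console.error("Failed to connect to MongoDB", err);
    process.exit(1);
  });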
npm i express mongoose
mongodb.js
const express = require('express');
const mongoose = require('mongoose');

const app = express();

mongoose.set('strictQuery', true);

mongoose.connect('mongodb://localhost:27017/db_name', {
  useNewUrlParser: true,
  useUnifiedTopology: true
})
  .then(() => console.log('MongoDB Connected...'))
  .catch((err) => console.log(err));

app.listen(3000, () => { console.log("Started on port 3000 !!!") });
node mongodb.js
Using the method below you can easily manage as many connections as you need:
var mongoose = require('mongoose');

// connection options shared by both connections (adjust as needed)
const options = { useNewUrlParser: true, useUnifiedTopology: true };

// Set up the default mongoose connection
const bankDB = () => {
  return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
};

const bankConnection = bankDB();
bankConnection.then(() => console.log('Connected to mongoDB-Atlas bankApp...'))
  .catch((err) => console.error('Could not connect to mongoDB', err));

// Set up a second mongoose connection
const myDB = () => {
  return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
};

const myConnection = myDB();
myConnection.then(() => console.log('Connected to mongoDB-Atlas connection 2...'))
  .catch((err) => console.error('Could not connect to mongoDB', err));

module.exports = { bankConnection, myConnection };
I created a basic Express.js application and added a model (using thinky and RethinkDB), and I'm trying to pass the changefeed to the Jade file but can't figure out how to pass the results of the feed. My understanding is that changes() returns an infinite cursor, so it is always waiting for new data. How do I handle that in the Express res? Any idea what I am missing here?
var express = require('express');
var router = express.Router();
var thinky = require('thinky')();
var type = thinky.type;
var r = thinky.r;
var User = thinky.createModel('User', {
  name: type.string()
});
// end of thinky code to create the model

// GET home page.
router.get('/', function (req, res) {
  var user = new User({name: req.query.author});
  user.save().then(function (result) {
    console.log(result);
  });

  //User.run().then(function (result) {
  //  res.render('index', { title: 'Express', result: result });
  //});

  User.changes().then(function (feed) {
    feed.each(function (err, doc) { console.log(doc); }); // pass doc to the res
    res.render('index', { title: 'Express', doc: doc }); // doc is undefined when I run the application. Why?
  });
});
module.exports = router;
The problem that I believe you are facing is that feed.each is a loop that calls the contained function for each item in the feed. So to access the doc passed to console.log(doc), you are going to need to either place your code inside the function where doc exists (i.e., within the scope of the variable doc), or make a global variable to store the doc value(s).
So, for example, assuming doc is a string and that you wish to place all docs in an array, you would start by creating a variable with a scope that res.render can see, which for this example will be MYDOCS. Then you would append each doc to it, and after that you would simply use MYDOCS any time you are attempting to access a doc outside of the feed.each function.
var MYDOCS = [];

User.changes().then(function (feed) {
  feed.each(function (err, doc) { MYDOCS.push(doc); });
});

router.get('/', function (req, res) {
  var user = new User({name: req.query.author});
  user.save().then(function (result) {
    console.log(result);
  });

  //User.run().then(function (result) {
  //  res.render('index', { title: 'Express', result: result });
  //});

  res.render('index', { title: 'Express', doc: MYDOCS[0] }); // MYDOCS is filled in by the changefeed above
});

module.exports = router;
I've written a simple Express.js server that handles REST API requests and fetches data from a MongoDB database. When I make a GET request to a specific endpoint ("localhost:8081/api/getUserData"), the promise chain doesn't work the way I want it to, and I still don't understand why.
This is the error I get:
"[TypeError: Cannot read property 'db' of undefined]"
var MongoClient = require('mongodb').MongoClient;
var express = require('express');
var app = express();
var rp = require("request-promise");
var cors = require('cors');

// use it before all route definitions
app.use(cors({ origin: '*' }));

/********************** REST API FUNCTIONS **********************/

app.get('/api/getUserData', function (req, res, next) {
  var context = {};
  console.log("in api getUserData");
  context.db_url = 'mongodb://localhost:27017/test';

  openDatabaseConnection(context)
    .then(getAllUserLocations)
    .then(closeDatabaseConnection)
    .then(function (context) {
      res.send(context.userLocations);
    })
    .catch(function (error) {
      console.log("ERROR :");
      console.log(error);
    });
});

/********************** END REST API FUNCTIONS **********************/

function getAllUserLocations(context) {
  context.db.collection("test").find().toArray().then(function (err, result) {
    console.log("Received from db: " + result.length + " objects");
    context.userLocations = result;
    return context;
  });
}

function openDatabaseConnection(context) {
  console.log("Opening DB connection...");
  return MongoClient.connect(context.db_url)
    .then(function (db) {
      console.log("DB connection opened.");
      context.db = db;
      return context;
    });
}

function closeDatabaseConnection(context) {
  console.log("Closing DB connection");
  return context.db.close()
    .then(function () {
      console.log("DB connection closed");
      return context;
    });
}

/********************** STARTING SERVER **********************/

var server = app.listen(8081, function () {
  var host = server.address().address;
  var port = server.address().port;

  console.log("Githex server listening at http://%s:%s", host, port);
});
Any help would be appreciated, and even more with an explanation because I don't understand what I've done wrong.
Thanks!
Just like @adeneo mentioned in the first comment, you are missing the db property. Look at your first function:
app.get('/api/getUserData', function (req, res, next) {
  var context = {};
  console.log("in api getUserData");
  context.db_url = 'mongodb://localhost:27017/test';

  openDatabaseConnection(context)
    .then(getAllUserLocations)
    .then(closeDatabaseConnection)
    .then(function (context) {
      res.send(context.userLocations);
    })
    .catch(function (error) {
      console.log("ERROR :");
      console.log(error);
    });
});
Now going through the lines within this function:
You set up context as an empty object.
You add a db_url property onto the object, so far you have:
context = { db_url: "mongodb://localhost:27017/test" }
You pass the context object into the openDatabaseConnection function.
Within the openDatabaseConnection function you do return a context object, and because it is returned from inside .then(), the chained .then(getAllUserLocations) receives it correctly. The step that actually breaks the chain is getAllUserLocations: it starts find().toArray() but never returns the resulting promise, so the function implicitly returns undefined, and the next handler, closeDatabaseConnection, is called with undefined as its context. That is exactly the "Cannot read property 'db' of undefined" error you see. (There is a second issue: .toArray() resolves with the result only, not with (err, result).)
So instead of leaving that promise dangling, return it from getAllUserLocations and take the resolved array as the single argument, as sketched below.
That way the value resolved at each step is passed along the chain and actually used.
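A minimal sketch of the corrected function, keeping the collection name "test" and the context-passing style from the question:
function getAllUserLocations(context) {
  // return the promise so the next .then() in the chain receives the context
  return context.db.collection("test").find().toArray()
    .then(function (result) {
      console.log("Received from db: " + result.length + " objects");
      context.userLocations = result;
      return context;
    });
}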
I have a db.js file in which I set up a MongoDB connection. I would like to export the database object into my main app.js file:
// db.js
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
  module.exports = db;
});

// app.js
var db = require('./db');

app.get('/', function (req, res) {
  db.collection(/* … */); // throws error
});
The error is:
TypeError: Object #<Object> has no method 'collection'
So, how can I export the db object properly?
The best option, as suggested in the comments by elclanrs, is to export a promise:
// database.js
var MongoClient = require('mongodb').MongoClient,
    Q = require('q'),
    connect = Q.nbind(MongoClient.connect, MongoClient);

var promise = connect(/* url */);

module.exports = {
  connect: function () {
    return promise;
  }
};

// app.js
var database = require('./database');

database.connect()
  .then(function (db) {
    app.get('/', function (req, res) {
      db.collection(/* … */);
    });
  })
  .catch(function (err) {
    console.log('Error connecting to DB:', err);
  })
  .done();
(I'm using the awesome Q library here.)
Below is the old version of my answer, left for the sake of history (but if you don't want to use promises, instead of going down that road, you should use Matt's answer).
Its downside is that it will open a new connection every time you call connect() (bummer!).
// DO NOT USE: left for the sake of history
// database.js
var MongoClient = require('mongodb').MongoClient;

function connect(cb) {
  MongoClient.connect(/* the URL */, cb);
}

module.exports = {
  connect: connect
};

// app.js
var database = require('./database');

database.connect(function (err, db) {
  app.get('/', function (req, res) {
    db.collection(/* … */);
  });
});
You can't do it as you want to do it, because, quoting the docs:
Note that assignment to module.exports must be done immediately. It cannot be done in any callbacks.
You can, however, assign a property of module.exports in a callback, so this will work:
// db.js
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
  module.exports.instance = db;
});

// app.js
var db = require('./db');

// some time later (when `.instance` is available)
app.get('/', function (req, res) {
  db.instance.collection(/* … */);
});
However, the "some time later" part is a bit of a pain, so you may just want to use some sort of callback:
// db.js
var queue = [];
var instance = null;

require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
  instance = db;

  while (queue.length) {
    queue.pop()(instance);
  }
});

module.exports.done = function (callback) {
  if (instance === null) {
    queue.push(callback);
  } else {
    callback(instance);
  }
};

// app.js
require('./db').done(function (db) {
  app.get('/', function (req, res) {
    db.collection(/* … */);
  });
});
The above also handles cases where handlers via done() are attached after the connection has already been made.
Servers typically have three phases: init, serve, and uninit. This seems obvious, but when you start writing servers from scratch (e.g., in Java you start by inheriting from HttpServlet) you sometimes forget how to do these things...
In the init (startup) phase you must open the db connection (pool) and save the object somewhere, typically in your db.js module. Then in the serve phase you retrieve the MongoDB connection from db.js, as sketched below.
Related: How to get an instance of db from node-mongo native driver?
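A minimal sketch of that init/serve split, assuming a file called db.js and a placeholder URL (both illustrative, not from the original answer):
// db.js - init phase: open the connection (pool) once and remember it
var MongoClient = require('mongodb').MongoClient;

var db = null;

module.exports.init = function (callback) {
  MongoClient.connect('mongodb://localhost:27017/test', function (err, database) {
    if (err) return callback(err);
    db = database;
    callback(null, db);
  });
};

// serve phase: hand out the already-opened connection
module.exports.get = function () {
  return db;
};

// app.js - run init before listening, then call db.get() inside route handlers
var express = require('express');
var app = express();
var db = require('./db');

db.init(function (err) {
  if (err) throw err;
  app.listen(3000);
});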
In your code:
// db.js
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
  module.exports = db;
});

// app.js
var db = require('./db');

app.get('/', function (req, res) {
  db.collection(/* … */); // throws error
});
You've called connect in the db.js module, yet it's asynchronous.
The call to require in app.js is synchronous in behavior, though, so it will always receive the still-empty exports object (the reassignment of module.exports to the db value hasn't happened yet by the time db.js finishes executing).
I'd suggest keeping things simple.
The option I usually use is one where the app code makes the connection and doesn't start listening for HTTP connections until it is complete. Then I initialize each route file by calling a named method and passing the database connection to it.
Or, you could just always call connect in each module, yet cache the value. (The connect call would need to be made within the route callback code so that the routes are defined immediately and not only when the connection is actually established.)
// db.js
var _db = null;

module.exports = function (callback) {
  if (!_db) {
    _db = {}; // only one connection, so we'll stop others from trying
    require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
      _db = db;
      callback(err, db);
    });
  } else {
    callback(null, _db);
  }
};

// app.js
var db = require('./db');

db(function (err, connection) {
  // store the connection value here and pass around, or ...
  // call this always in each file ...
});

// or ...
app.get('/', function (req, res) {
  db(function (err, connection) {
    connection.collection(/* … */);
  });
});
Or, you could use MongooseJS (a wrapper for the native NodeJS MongoDB driver) where commands, etc. are queued if the connection isn't available yet ....
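A small sketch of that buffering behavior with callback-style (pre-7.x) Mongoose; the model, schema, URL, and route are illustrative:
var express = require('express');
var mongoose = require('mongoose');

var app = express();

// kick off the connection; there is no need to wait for it before defining routes
mongoose.connect('mongodb://localhost:27017/test');

var User = mongoose.model('User', new mongoose.Schema({ name: String }));

app.get('/', function (req, res, next) {
  // if the connection isn't open yet, Mongoose buffers this find()
  // and runs it once the connection is established
  User.find({}, function (err, users) {
    if (err) return next(err);
    res.json(users);
  });
});

app.listen(3000);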
I am using Node.js, Express, MongoDB, and Mongoose. I have a function that fetches the largest id number of a document in my MongoDB database and returns it to the program. I have begun modularizing my code and have migrated that function to another module. I have successfully accessed the function in my main module, but it involves an asynchronous database query. As the function returns a value, I want to assign it to a variable; unfortunately, when the returned value is assigned to the variable, the variable is actually set to undefined. I was thinking about using event emitters to signal that the query is finished, but that presents two issues as well:
1) I don't think you can do anything in a program AFTER a return statement, which would be what is required.
2) Event Emitters between modules seem very finicky.
Please help me get the variable to be assigned to the correct value. Code for both the main function and the module is below:
(main file) app.js:
// requires and start up app
var express = require('express');
var mongoose = require('mongoose')
  , dbURI = 'localhost/test';
var app = express();
var postmodel = require('./models/post').postmodel;

// configures app for general stuff needed such as bodyParser and static file directory
app.configure(function () {
  app.use(express.bodyParser());
  app.use(express.static(__dirname + '/static'));
});

// configures app for production, connects to mongoLab database rather than localhost
app.configure('production', function () {
  dbURI = 'mongodb://brad.ross.35:lockirlornie#ds037387.mongolab.com:37387/heroku_app6901832';
});

// tries to connect to database.
mongoose.connect(dbURI);

// once connection to database is open, then rest of app runs
mongoose.connection.on('open', function () {
  var PostModel = new postmodel();
  var Post = PostModel.setupPostSchema();
  var largest_id = PostModel.findLargestID(Post);
(module) post.js:
var mongoose = require('mongoose');

module.exports.postmodel = function () {
  this.setupPostSchema = function () {
    var postSchema = new mongoose.Schema({
      title: String,
      body: String,
      id: Number,
      date_created: String
    });

    var Post = mongoose.model('Post', postSchema);
    return Post;
  };

  this.findLargestID = function (Post) {
    Post.find(function (err, posts) {
      if (err) {
        console.log("error finding largest ID!");
      } else {
        var largest_id = 0;
        for (var post in posts) {
          if (posts[post].id >= largest_id) largest_id = posts[post].id;
        }
        console.log(largest_id);
        return largest_id;
      }
    });
  };
};
You need to have findLargestID accept a callback parameter that it will call once largest_id is available:
this.findLargestID = function (Post, callback) {
  Post.find(function (err, posts) {
    if (err) {
      console.log("error finding largest ID!");
      callback(err);
    } else {
      var largest_id = 0;
      for (var post in posts) {
        if (posts[post].id >= largest_id) largest_id = posts[post].id;
      }
      console.log(largest_id);
      callback(null, largest_id);
    }
  });
};
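A follow-up usage sketch for the caller in app.js; the callback body is illustrative and replaces the original var largest_id = PostModel.findLargestID(Post); line:
mongoose.connection.on('open', function () {
  var PostModel = new postmodel();
  var Post = PostModel.setupPostSchema();

  PostModel.findLargestID(Post, function (err, largest_id) {
    if (err) {
      console.log("could not determine the largest ID", err);
      return;
    }
    // largest_id is only available inside this callback
    console.log("largest id is", largest_id);
  });
});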