mongodb: cannot define variable [duplicate] - javascript
I'm using the node-mongodb-native driver with MongoDB to write a website.
I have some questions about how to manage connections:
Is it enough using only one MongoDB connection for all requests? Are there any performance issues? If not, can I setup a global connection to use in the whole application?
If not, is it good if I open a new connection when request arrives, and close it when handled the request? Is it expensive to open and close a connection?
Should I use a global connection pool? I hear the driver has a native connection pool. Is it a good choice?
If I use a connection pool, how many connections should be used?
Are there other things I should notice?
The primary committer to node-mongodb-native says:
You open do MongoClient.connect once when your app boots up and reuse
the db object. It's not a singleton connection pool each .connect
creates a new connection pool.
So, to answer your question directly, reuse the db object that results from MongoClient.connect(). This gives you pooling, and will provide a noticeable speed increase as compared with opening/closing connections on each db action.
Open a new connection when the Node.js application starts, and reuse the existing db connection object:
/server.js
import express from 'express';
import Promise from 'bluebird';
import logger from 'winston';
import { MongoClient } from 'mongodb';
import config from './config';
import usersRestApi from './api/users';

const app = express();

app.use('/api/users', usersRestApi);
app.get('/', (req, res) => {
  res.send('Hello World');
});

// Create a single MongoDB connection pool and start the application only
// after the database connection is ready. The pooled `db` handle is shared
// with every request handler through `app.locals`.
MongoClient.connect(config.database.url, { promiseLibrary: Promise }, (err, db) => {
  if (err) {
    // Without a database the app cannot serve requests: log and exit instead
    // of starting the HTTP listener with `db` undefined (the original only
    // warned and kept going).
    logger.error(`Failed to connect to the database. ${err.stack}`);
    process.exit(1);
  }
  app.locals.db = db;
  app.listen(config.port, () => {
    logger.info(`Node.js app is listening at http://localhost:${config.port}`);
  });
});
/api/users.js
import { Router } from 'express';
import { ObjectID } from 'mongodb';

const router = new Router();

/**
 * GET /:id — look up a single user by MongoDB ObjectID and return a trimmed
 * representation (email + first/last name). Responds 404 when no user
 * matches; invalid id strings and driver errors go to the error middleware.
 */
router.get('/:id', async (req, res, next) => {
  try {
    // The pooled db handle is attached to app.locals at boot time.
    const db = req.app.locals.db;
    const id = new ObjectID(req.params.id);
    // The field list must be wrapped in a `projection` option — a bare
    // { email: 1, ... } options object is silently ignored by the driver.
    const user = await db.collection('user').findOne({ _id: id }, {
      projection: {
        email: 1,
        firstName: 1,
        lastName: 1
      }
    });
    if (user) {
      user.id = req.params.id;
      res.send(user);
    } else {
      res.sendStatus(404);
    }
  } catch (err) {
    next(err);
  }
});

export default router;
Source: How to Open Database Connections in a Node.js/Express App
Here is some code that will manage your MongoDB connections.
var MongoClient = require('mongodb').MongoClient;
var url = require("../config.json")["MongoDBURL"]

// Driver options: one pool of 40 sockets, a short connect timeout and a few
// automatic retries. (These option names match the legacy 2.x driver.)
var option = {
  db: {
    numberOfRetries: 5
  },
  server: {
    auto_reconnect: true,
    poolSize: 40,
    socketOptions: {
      connectTimeoutMS: 500
    }
  },
  replSet: {},
  mongos: {}
};

function MongoPool() {}

var p_db;               // the shared db handle once connected
var connecting = false; // true while a connect is in flight
var waiters = [];       // callbacks queued while connecting

// Open the shared connection pool. Safe to call more than once: concurrent
// callers are queued and all receive the same db handle (the original
// started a brand-new MongoClient.connect — i.e. a new pool — per call).
function initPool(cb) {
  if (typeof cb === 'function') {
    waiters.push(cb);
  }
  if (connecting) {
    return MongoPool;
  }
  connecting = true;
  MongoClient.connect(url, option, function (err, db) {
    if (err) throw err;
    p_db = db;
    connecting = false;
    var pending = waiters;
    waiters = [];
    pending.forEach(function (waiter) {
      waiter(p_db);
    });
  });
  return MongoPool;
}
MongoPool.initPool = initPool;

// Hand the shared db to `cb`, connecting first if necessary.
function getInstance(cb) {
  if (!p_db) {
    initPool(cb);
  } else if (typeof cb === 'function') {
    cb(p_db);
  }
}
MongoPool.getInstance = getInstance;

module.exports = MongoPool;
When you start the server, call initPool
require("mongo-pool").initPool();
Then in any other module you can do the following:
var MongoPool = require("mongo-pool");
MongoPool.getInstance(function (db){
// Query your MongoDB database.
});
This is based on MongoDB documentation. Take a look at it.
Manage mongo connection pools in a single self-contained module. This approach provides two benefits. Firstly, it keeps your code modular and easier to test. Secondly, you're not forced to mix your database connection up in your request object, which is NOT the place for a database connection object. (Given the nature of JavaScript I would consider it highly dangerous to mix in anything to an object constructed by library code.) With that in mind, consider a module that exports two methods: connect = () => Promise and get = () => dbConnectionObject.
With such a module you can firstly connect to the database
// Runs in boot.js (or whatever file your application starts with).
const db = require('./myAwesomeDbModule');

// Any failure here is fatal: log it and hard-exit the process.
const onFatal = (e) => {
  console.error(e);
  // Always hard exit on a database connection error
  process.exit(1);
};

db.connect()
  .then(() => console.log('database connected'))
  .then(() => bootMyApplication())
  .catch(onFatal);
When in flight your app can simply call get() when it needs a DB connection.
const db = require('./myAwesomeDbModule');
db.get().find(...)... // I have excluded code here to keep the example simple
If you set up your db module in the same way as the following not only will you have a way to ensure that your application will not boot unless you have a database connection you also have a global way of accessing your database connection pool that will error if you have not got a connection.
// myAwesomeDbModule.js
//
// Holds one shared connection for the whole process. connect() must be
// called (and awaited) during boot; get() then returns the cached handle.
let connection = null;

module.exports.connect = () => new Promise((resolve, reject) => {
  // Reuse the existing connection instead of opening another pool when
  // connect() is called a second time.
  if (connection) {
    resolve(connection);
    return;
  }
  MongoClient.connect(url, option, function (err, db) {
    if (err) {
      reject(err);
      return;
    }
    // Cache BEFORE resolving so get() is guaranteed to work inside the
    // caller's .then() (the original resolved first, then assigned).
    connection = db;
    resolve(db);
  });
});

// Synchronous accessor for the shared connection; throws when connect()
// has not finished yet, surfacing ordering bugs early.
module.exports.get = () => {
  if (!connection) {
    throw new Error('Call connect first!');
  }
  return connection;
}
If you have Express.js, you can use express-mongo-db for caching and sharing the MongoDB connection between requests without a pool (since the accepted answer says it is the right way to share the connection).
If not - you can look at its source code and use it in another framework.
You should create the connection as a service and then reuse it whenever it is needed.
// db.service.js
import { MongoClient } from "mongodb";
import database from "../config/database";

/**
 * Holds the single shared db handle for the app. connect() must be called
 * once at startup; afterwards `dbService.db` is the pooled connection.
 */
const dbService = {
  db: undefined,
  connect: callback => {
    MongoClient.connect(database.uri, function(err, data) {
      if (err) {
        // Return immediately: the original fell through and invoked the
        // callback a second time with `data` undefined. It also called the
        // non-existent static MongoClient.close() — there is no client to
        // close here, the connection never opened.
        callback(err);
        return;
      }
      dbService.db = data;
      console.log("Connected to database");
      callback(null);
    });
  }
};

export default dbService;
my App.js sample
// App Start
// Connect to the database first; only start listening once the shared
// connection is ready. A connect failure is fatal.
dbService.connect(err => {
  if (err) {
    console.log("Error: ", err);
    process.exit(1);
  }
  server.listen(config.port, () => {
    // Fixed typo in the startup log message ("runnning" -> "running").
    console.log(`Api running at ${config.port}`);
  });
});
and use it wherever you want with
import dbService from "db.service.js"
const db = dbService.db
I have been using generic-pool with Redis connections in my app — I highly recommend it. It's generic, and I know for certain that it works with MySQL, so I don't think you'll have any problems with it and Mongo.
https://github.com/coopernurse/node-pool
I have implemented the code below in my project to provide connection pooling: it creates a minimum number of connections and reuses an available connection when possible.
/* Mongo.js*/
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/yourdatabasename";
var assert = require('assert');
var connection=[];
// Create the database connection
establishConnection = function(callback){
MongoClient.connect(url, { poolSize: 10 },function(err, db) {
assert.equal(null, err);
connection = db
if(typeof callback === 'function' && callback())
callback(connection)
}
)
}
function getconnection(){
return connection
}
module.exports = {
establishConnection:establishConnection,
getconnection:getconnection
}
/*app.js*/
// establish one connection with all other routes will use.
var db = require('./routes/mongo')
// Fire-and-forget: routes must not run queries until the connect callback
// has fired, otherwise getconnection() still returns the initial value.
db.establishConnection();
//you can also call with callback if you wanna create any collection at starting
/*
db.establishConnection(function(conn){
conn.createCollection("collectionName", function(err, res) {
if (err) throw err;
console.log("Collection created!");
});
};
*/
// NOTE(review): the commented-out example above ends with `};` where `});`
// is needed — it will not parse if uncommented as-is.
// anyother route.js
var db = require('./mongo')
router.get('/', function(req, res, next) {
// Reuse the pool opened at startup; no per-request connect/close.
var connection = db.getconnection()
res.send("Hello");
});
If using express there is another more straightforward method, which is to utilise Express's built in feature to share data between routes and modules within your app. There is an object called app.locals. We can attach properties to it and access it from inside our routes. To use it, instantiate your mongo connection in your app.js file.
var app = express();

// Open the connection pool once at startup and share the collection handle
// with every route via app.locals.
MongoClient.connect('mongodb://localhost:27017/')
  .then(client => {
    const db = client.db('your-db');
    const collection = db.collection('your-collection');
    app.locals.collection = collection;
  })
  .catch(err => {
    // The original chain had no rejection handler, so a failed connect
    // became an unhandled promise rejection. Fail fast instead.
    console.error(err);
    process.exit(1);
  });

// view engine setup
app.set('views', path.join(__dirname, 'views'));
This database connection, or indeed any other data you wish to share around the modules of you app can now be accessed within your routes with req.app.locals as below without the need for creating and requiring additional modules.
// List every document in the shared collection as JSON.
app.get('/', (req, res) => {
  const collection = req.app.locals.collection;
  collection.find({}).toArray()
    .then(response => res.status(200).json(response))
    .catch(error => {
      // Also answer the request: the original only logged the error, which
      // left the client hanging until its socket timed out.
      console.error(error);
      res.sendStatus(500);
    });
});
This method ensures that you have a database connection open for the duration of your app unless you choose to close it at any time. It's easily accessible with req.app.locals.your-collection and doesn't require creation of any additional modules.
Best approach to implement connection pooling is you should create one global array variable which hold db name with connection object returned by MongoClient and then reuse that connection whenever you need to contact Database.
In your Server.js define var global.dbconnections = [];
Create a Service naming connectionService.js. It will have 2 methods getConnection and createConnection.
So when user will call getConnection(), it will find detail in global connection variable and return connection details if already exists else it will call createConnection() and return connection Details.
Call this service with a &lt;db_name&gt;: it will return the connection object if one already exists; otherwise it will create a new connection and return it to you.
Hope it helps :)
Here is the connectionService.js code:
var mongo = require('mongoskin');
var mongodb = require('mongodb');
var Q = require('q');

var service = {};
service.getConnection = getConnection;
module.exports = service;

// Resolve with the cached connection for `appDB`, creating (and caching)
// a new one on first use.
function getConnection(appDB) {
  var deferred = Q.defer();
  var connectionDetails = global.dbconnections.find(item => item.appDB == appDB)
  if (connectionDetails) {
    deferred.resolve(connectionDetails.connection);
  } else {
    createConnection(appDB).then(function (connection) {
      deferred.resolve(connection);
    }, function (err) {
      // Propagate connect failures; the original silently dropped them.
      deferred.reject(err);
    });
  }
  return deferred.promise;
}

// Open a new connection for `appDB` and record it in the global cache.
// NOTE(review): `connectionServer` is assumed to be a global connection-string
// prefix defined elsewhere — confirm at the call site.
function createConnection(appDB) {
  var deferred = Q.defer();
  mongodb.MongoClient.connect(connectionServer + appDB, (err, database) => {
    if (err) {
      // Return after rejecting: the original fell through, cached an
      // undefined connection, and then resolved the same deferred.
      deferred.reject(err.name + ': ' + err.message);
      return;
    }
    global.dbconnections.push({ appDB: appDB, connection: database });
    deferred.resolve(database);
  })
  return deferred.promise;
}
In case anyone wants something that works in 2021 with Typescript, here's what I'm using:
import { MongoClient, Collection } from "mongodb";

const FILE_DB_HOST = process.env.FILE_DB_HOST as string;
const FILE_DB_DATABASE = process.env.FILE_DB_DATABASE as string;
const FILES_COLLECTION = process.env.FILES_COLLECTION as string;

if (!FILE_DB_HOST || !FILE_DB_DATABASE || !FILES_COLLECTION) {
  // Throw an Error instance (not a bare string) so stack traces survive.
  throw new Error(
    "Missing FILE_DB_HOST, FILE_DB_DATABASE, or FILES_COLLECTION environment variables."
  );
}

const client = new MongoClient(FILE_DB_HOST, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

/**
 * Static holder for the shared files collection. init() connects once at
 * module load; request handlers read Mongoose.FilesCollection afterwards.
 */
class Mongoose {
  static FilesCollection: Collection;

  static async init() {
    const connection = await client.connect();
    const FileDB = connection.db(FILE_DB_DATABASE);
    Mongoose.FilesCollection = FileDB.collection(FILES_COLLECTION);
  }
}

// The original left this promise floating, so a failed connect became an
// unhandled rejection while FilesCollection stayed undefined. Fail fast.
Mongoose.init().catch((err) => {
  console.error(err);
  process.exit(1);
});

export default Mongoose;
I believe if a request occurs too soon (before Mongo.init() has time to finish), an error will be thrown, since Mongoose.FilesCollection will be undefined.
import { Request, Response, NextFunction } from "express";
import Mongoose from "../../mongoose";

// Fetch the document whose fileName is "hello" from the shared files
// collection (initialized at module load) and send it to the client.
export default async function GetFile(req: Request, res: Response, next: NextFunction) {
  const collection = Mongoose.FilesCollection;
  const doc = await collection.findOne({ fileName: "hello" });
  res.send(doc);
}
For example, if you call files.findOne({ ... }) and Mongoose.FilesCollection is undefined, then you will get an error.
npm i express mongoose
mongodb.js
const express = require('express');
const mongoose = require('mongoose')

const app = express();

mongoose.set('strictQuery', true);

// Connect once at startup; mongoose manages its own internal pool.
const connectionOptions = {
  useNewUrlParser: true,
  useUnifiedTopology: true
};

mongoose
  .connect('mongodb://localhost:27017/db_name', connectionOptions)
  .then(() => console.log('MongoDB Connected...'))
  .catch((err) => console.log(err))

app.listen(3000, () => { console.log("Started on port 3000 !!!") })
node mongodb.js
Using the method below you can easily manage as many connections as you need.
var mongoose = require('mongoose');

// Set up the first mongoose connection. Each createConnection() call owns
// its own pool, so several databases can be used side by side.
// (The connection strings had `#` where `@` separates credentials from the
// host — a copy/paste artifact — and a `passwprd` typo in the placeholder.)
const bankDB = () => {
  return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
}

bankDB().then(() => console.log('Connected to mongoDB-Atlas bankApp...'))
  .catch((err) => console.error('Could not connected to mongoDB', err));

// Set up the second mongoose connection.
const myDB = () => {
  return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
}

myDB().then(() => console.log('Connected to mongoDB-Atlas connection 2...'))
  .catch((err) => console.error('Could not connected to mongoDB', err));

// Bug fix: `module.exports = { bankDB(), myDB() };` is a syntax error —
// export the factory functions themselves.
module.exports = { bankDB, myDB };
Related
Serverless express close mongodb connexion
I am using serverless on aws with nodejs and mongodb atlas as database At the moment I am using the trial version which allow maximum 500 connections. Seems that my code is not disconnecting the database when process end I am using express to manage it First I had no connection close thinking that the connection will be closed automatically once the process end but no I had a lot of connections open. Then I added a middleware to close my connections after the response has been sent, it was not working, I was thinking that serverless was stopping the process once the response was sent. Not on each route I am closing mongo connection, for example router.get('/website/:id/page', async (req, res, next) => { try { const pages = await pageDataProvider.findByWebsite(req.params.id); await mongodbDataProvider.close(); res.json(pages); } catch (error) { next(error) } }) This is how I handle connections with mongo const MongoClient = require('mongodb').MongoClient const config = require('../config') const MONGODB_URI = config.stage === 'test' ? global.__MONGO_URI__ : `mongodb+srv://${config.mongodb.username}:${config.mongodb.password}#${config.mongodb.host}/admin?retryWrites=true&w=majority`; const client = new MongoClient(MONGODB_URI); let cachedDb = null; module.exports.connect = async () => { if (cachedDb) return cachedDb; await client.connect(); const dbName = config.stage === 'test' ? global.__MONGO_DB_NAME__ : config.stage; const db = client.db(dbName) cachedDb = db; return db; } module.exports.close = async () => { if (!cachedDb) return; await client.close(); cachedDb = null; } I do not understand why I have so many connections open
Step 1 Isolate the call to the MongoClient.connect() function into its own module so that the connections can be reused across functions. Let's create a file mongo-client.js for that: mongo-client.js: const { MongoClient } = require('mongodb'); // Export a module-scoped MongoClient promise. By doing this in a separate // module, the client can be shared across functions. const client = new MongoClient(process.env.MONGODB_URI); module.exports = client.connect(); Step 2 Import the new module and use it in function handlers to connect to database. some-file.js: const clientPromise = require('./mongodb-client'); // Handler module.exports.handler = async function(event, context) { // Get the MongoClient by calling await on the connection promise. Because // this is a promise, it will only resolve once. const client = await clientPromise; // Use the connection to return the name of the connected database for example. return client.db().databaseName; }
I think its a programmatic error in your close method. Please have a closer look at if (!cachedDb) return; I think it should have been if (cachedDb != null) return; As stated in other response, I would strongly advice against closing the DB connections with each request. You should be looking for a pool mechanism, where a connection from the pool is handed to your application. The application can wait till it receives the connection Closure of the DB connections should be handled at the time when the application is exiting (shutting/going down). This way application will at least try to close the connections gracefully. Nonetheless, here is an adaptation your program index.js const express = require('express') const app = express() const port = 3000 const dbProvider = require('./dbProvider'); dbProvider.connect(); app.get('/testConnection',async (req, res, next) => { console.log('Doing something for fetching the request & closing connection'); dbProvider.close(); console.log('After closing the connection'); }) app.listen(port, () => { console.log(`Example app listening at http://localhost:${port}`) }) dbProvider.js let cachedDb = null; let db = {}; module.exports.connect = async () => { if (cachedDb) { console.log('Returning Cachedb'); return cachedDb; } else{ console.log('Not a cachedDB'); } db.setup = 1; return db; } module.exports.close = async () => { if (!cachedDb) { console.log('Since its cached DB not closing the connection'); return; } db=null; return; } And here is the console output: -> node index.js Not a cachedDB Example app listening at http://localhost:3000 Doing something for fetching the request & closing connection Since its cached DB not closing the connection After closing the connection
According to this: https://docs.atlas.mongodb.com/best-practices-connecting-from-aws-lambda/ It's a good idea to add this line so you keep your connection pool between requests. context.callbackWaitsForEmptyEventLoop = false;
Javascript: exporting objects initialized asynchronously [duplicate]
I'm kinda new to module creation and was wondering about module.exports and waiting for async functions (like a mongo connect function for example) to complete and exporting the result. The variables get properly defined using async/await in the module, but when trying to log them by requiring the module, they show up as undefined. If someone could point me in the right direction, that'd be great. Here's the code I've got so far: // module.js const MongoClient = require('mongodb').MongoClient const mongo_host = '127.0.0.1' const mongo_db = 'test' const mongo_port = '27017'; (async module => { var client, db var url = `mongodb://${mongo_host}:${mongo_port}/${mongo_db}` try { // Use connect method to connect to the Server client = await MongoClient.connect(url, { useNewUrlParser: true }) db = client.db(mongo_db) } catch (err) { console.error(err) } finally { // Exporting mongo just to test things console.log(client) // Just to test things I tried logging the client here and it works. It doesn't show 'undefined' like test.js does when trying to console.log it from there module.exports = { client, db } } })(module) And here's the js that requires the module // test.js const {client} = require('./module') console.log(client) // Logs 'undefined' I'm fairly familiar with js and am still actively learning and looking into things like async/await and like features, but yeah... I can't really figure that one out
You have to export synchronously, so its impossible to export client and db directly. However you could export a Promise that resolves to client and db: module.exports = (async function() { const client = await MongoClient.connect(url, { useNewUrlParser: true }); const db = client.db(mongo_db); return { client, db }; })(); So then you can import it as: const {client, db} = await require("yourmodule"); (that has to be in an async function itself) PS: console.error(err) is not a proper error handler, if you cant handle the error just crash
the solution provided above by #Jonas Wilms is working but requires to call requires in an async function each time we want to reuse the connection. an alternative way is to use a callback function to return the mongoDB client object. mongo.js: const MongoClient = require('mongodb').MongoClient; const uri = "mongodb+srv://<user>:<pwd>#<host and port>?retryWrites=true"; const mongoClient = async function(cb) { const client = await MongoClient.connect(uri, { useNewUrlParser: true }); cb(client); }; module.exports = {mongoClient} then we can use mongoClient method in a diffrent file(express route or any other js file). app.js: var client; const mongo = require('path to mongo.js'); mongo.mongoClient((connection) => { client = connection; }); //declare express app and listen.... //simple post reuest to store a student.. app.post('/', async (req, res, next) => { const newStudent = { name: req.body.name, description: req.body.description, studentId: req.body.studetId, image: req.body.image }; try { await client.db('university').collection('students').insertOne({newStudent}); } catch(err) { console.log(err); return res.status(500).json({ error: err}); } return res.status(201).json({ message: 'Student added'}); };
NodeJS and MongoDB losing the definition of a variable even with module.exports
Hello i'm trying to fetch some partner names from my mongodb database and put them into a list of variables. But it for some reason loses it's definition when I try to export it. What's going on? This is the first file. ///// mongodb.js ///// const MongoClient = require('mongodb').MongoClient; const assert = require('assert'); const findDocuments = function(db, callback) { // Get the documents collection const collection = db.collection('partners'); // Find some documents collection.find({}).toArray(function(err, docs) { assert.equal(err, null); callback(docs); }); }; // Connection URL const url = 'mongodb://localhost:27017'; // Database Name const dbName = 'yarle'; // Use connect method to connect to the server MongoClient.connect(url, function(err, client) { assert.equal(null, err); console.log("Connected succesfully to Database"); const db = client.db(dbName); findDocuments(db, function(docs) { module.exports = { partner1: console.log(docs[0]['partner_name']), partner2: console.log(docs[1]['partner_name']), partner3: console.log(docs[2]['partner_name']), }; client.close(); }); }); //console.log(Object.keys(partners[0][0])); And this is the end file. ///// Endfile.ts ///// import { Request, Response } from 'express'; import { PartnersList } from './data.d'; var partners = require( './mongodb.js'); console.log(partners.partner1); const titles = [ partners.partner1, partners.partner2, partners.partner3, ];
Your problem is not with module.exports, it's with asynchronous programming. When you call MongoClient.Connect, the code in your callback does not get executed synchronously. It gets executed some time in the future. You have no control over when that happens. The same thing is true of the findDocument callback. Programming asynchronously is a little trickier, but you will have to learn it to write modern javascript. Asynchrony is a central tenet of nodejs. Read on it, learn examples, and your problem will become clear. Instead of exporting the values of partner1, 2 and 3, export a function with a callback. This new function can call MongoClient.Connect, passing down the callback. Endfile.ts can now call your newly created asynchronous function and assign the titles array in the callback. Like this: const MongoClient = require('mongodb').MongoClient; const assert = require('assert'); const findDocuments = function (db, callback) { // Get the documents collection const collection = db.collection('partners'); // Find some documents collection.find({}).toArray(function (err, docs) { assert.equal(err, null); callback(docs); }); }; // Connection URL const url = 'mongodb://localhost:27017'; // Database Name const dbName = 'yarle'; module.exports.getPartners = (callback) { // Use connect method to connect to the server MongoClient.connect(url, function (err, client) { if (err) { callback(err); return; } console.log("Connected succesfully to Database"); const db = client.db(dbName); findDocuments(db, function (docs) { const partners = { partner1: docs[0]['partner_name'], partner2: docs[1]['partner_name'], partner3: docs[2]['partner_name'] }; callback(null, partners); client.close(); }); }); } and this import { Request, Response } from 'express'; import { PartnersList } from './data.d'; var mongoClient = require('./mongodb.js'); mongoClient.getPartners(function (err, partners) { assert.equal(null, err); const titles = partners; });
What is the correct nodejs architecure for database connection?
I am generally connecting to a database before listening a port like this: //server.js MongoClient.connect(process.env.mongo_url, { useNewUrlParser: true }, (err, client) => { if(err) throw err console.log('db connected') global.db = client.db(database_name); app.listen(process.env.port || 3000, ()=>{ console.log(`Server listening on http://localhost:${process.env.port || 3000}`) }) }); And using it through the connection from the global variable. Lately, I started to add service layers in order to use the same service from several other places. I also integrated make-runnable lib to my services so that I can execute service functions from the terminal. Ex. node usersService.js register <email> <password> for below service. // usersService.js exports.register = (email, password)=>{ // business logic to register the user } require('make-runnable'); // usersController.js import usersService = require('./services/usersService') router.post('/register',async (req, res)=>{ usersService.register(req.body.email, req.body.password) ... }) Since the service needs a db connection, I created a db module and edited the service: // db.js const {MongoClient} = require('mongodb') function connect(){ return new Promise((resolve, reject)=>{ console.log(`Trying to connect to ${process.env.mongo_url}`) MongoClient.connect(process.env.mongo_url, { useNewUrlParser: true }, (err, client) => { if(err) return reject(err) console.log('db connected') resolve(client.db(database_name)) }); }) } exports.connect = connect // usersService.js exports.register = (email, password)=>{ // connect to db if executed from terminal if (require.main === module) var db = await require('../../db.js').connect() } // business logic to register the user } require('make-runnable'); This structure works. But code snippet that I added to service for db connection in case of executing from terminal seems unpleasant. I need to add it to all service functions. 
How can I structure the project so that I can call each service function from the terminal that requires a db connection? Any architectural suggestions are welcome.
how can i use db.collection() outside of mongodb.connect() [duplicate]
I've been reading and reading and still am confused on what is the best way to share the same database (MongoDb) connection across whole NodeJs app. As I understand connection should be open when app starts and reused between modules. My current idea of the best way is that server.js (main file where everything starts) connects to database and creates object variable that is passed to modules. Once connected this variable will be used by modules code as necessary and this connection stays open. E.g.: var MongoClient = require('mongodb').MongoClient; var mongo = {}; // this is passed to modules and code MongoClient.connect("mongodb://localhost:27017/marankings", function(err, db) { if (!err) { console.log("We are connected"); // these tables will be passed to modules as part of mongo object mongo.dbUsers = db.collection("users"); mongo.dbDisciplines = db.collection("disciplines"); console.log("aaa " + users.getAll()); // displays object and this can be used from inside modules } else console.log(err); }); var users = new(require("./models/user"))(app, mongo); console.log("bbb " + users.getAll()); // not connected at the very first time so displays undefined then another module models/user looks like that: Users = function(app, mongo) { Users.prototype.addUser = function() { console.log("add user"); } Users.prototype.getAll = function() { return "all users " + mongo.dbUsers; } } module.exports = Users; Now I have horrible feeling that this is wrong so are there any obvious problems with this approach and if so how to make it better?
You can create a mongoUtil.js module that has functions to both connect to mongo and return a mongo db instance: const MongoClient = require( 'mongodb' ).MongoClient; const url = "mongodb://localhost:27017"; var _db; module.exports = { connectToServer: function( callback ) { MongoClient.connect( url, { useNewUrlParser: true }, function( err, client ) { _db = client.db('test_db'); return callback( err ); } ); }, getDb: function() { return _db; } }; To use it, you would do this in your app.js: var mongoUtil = require( 'mongoUtil' ); mongoUtil.connectToServer( function( err, client ) { if (err) console.log(err); // start the rest of your app here } ); And then, when you need access to mongo somewhere else, like in another .js file, you can do this: var mongoUtil = require( 'mongoUtil' ); var db = mongoUtil.getDb(); db.collection( 'users' ).find(); The reason this works is that in node, when modules are require'd, they only get loaded/sourced once so you will only ever end up with one instance of _db and mongoUtil.getDb() will always return that same instance. Note, code not tested.
There are many ways this could be tweaked to accept configuration objects in places, but overall it's similar to how you have your code laid out, albeit with more modern JS syntax. Could easily be rewritten to prototypes and callbacks, if that's your requirement. mongo.js const { MongoClient } = require('mongodb'); const config = require('./config'); const Users = require('./Users'); const conf = config.get('mongodb'); class MongoBot { constructor() { const url = `mongodb://${conf.hosts.join(',')}`; this.client = new MongoClient(url, conf.opts); } async init() { await this.client.connect(); console.log('connected'); this.db = this.client.db(conf.db); this.Users = new Users(this.db); } } module.exports = new MongoBot(); Users.js class User { constructor(db) { this.collection = db.collection('users'); } async addUser(user) { const newUser = await this.collection.insertOne(user); return newUser; } } module.exports = User; app.js const mongo = require('./mongo'); async function start() { // other app startup stuff... await mongo.init(); // other app startup stuff... } start(); someFile.js const { Users } = require('./mongo'); async function someFunction(userInfo) { const user = await Users.addUser(userInfo); return user; }
Here's how I do it with contemporary syntax, based on go-oleg's example. Mine is tested and functional. I put some comments in the code. ./db/mongodb.js const MongoClient = require('mongodb').MongoClient const uri = 'mongodb://user:password#localhost:27017/dbName' let _db const connectDB = async (callback) => { try { MongoClient.connect(uri, (err, db) => { _db = db return callback(err) }) } catch (e) { throw e } } const getDB = () => _db const disconnectDB = () => _db.close() module.exports = { connectDB, getDB, disconnectDB } ./index.js // Load MongoDB utils const MongoDB = require('./db/mongodb') // Load queries & mutations const Users = require('./users') // Improve debugging process.on('unhandledRejection', (reason, p) => { console.log('Unhandled Rejection at:', p, 'reason:', reason) }) const seedUser = { name: 'Bob Alice', email: 'test#dev.null', bonusSetting: true } // Connect to MongoDB and put server instantiation code inside // because we start the connection first MongoDB.connectDB(async (err) => { if (err) throw err // Load db & collections const db = MongoDB.getDB() const users = db.collection('users') try { // Run some sample operations // and pass users collection into models const newUser = await Users.createUser(users, seedUser) const listUsers = await Users.getUsers(users) const findUser = await Users.findUserById(users, newUser._id) console.log('CREATE USER') console.log(newUser) console.log('GET ALL USERS') console.log(listUsers) console.log('FIND USER') console.log(findUser) } catch (e) { throw e } const desired = true if (desired) { // Use disconnectDB for clean driver disconnect MongoDB.disconnectDB() process.exit(0) } // Server code anywhere above here inside connectDB() }) ./users/index.js const ObjectID = require('mongodb').ObjectID // Notice how the users collection is passed into the models const createUser = async (users, user) => { try { const results = await users.insertOne(user) return results.ops[0] } catch (e) { throw e } } const 
getUsers = async (users) => { try { const results = await users.find().toArray() return results } catch (e) { throw e } } const findUserById = async (users, id) => { try { if (!ObjectID.isValid(id)) throw 'Invalid MongoDB ID.' const results = await users.findOne(ObjectID(id)) return results } catch (e) { throw e } } // Export garbage as methods on the Users object module.exports = { createUser, getUsers, findUserById }
If you are using Express, then you can use mongo-express-req module that allows you to get db connection in request object. Install npm install --save mongo-express-req server.js var app = require('express')(); var mongoExpressReq = require('mongo-express-req'); app.use(mongoExpressReq('mongodb://localhost/test')); routes/users.js app.get('/', function (req, res, next) { req.db // => Db object }); Note: mongo-express-req is fork of not maintained express-mongo-db.
A tested solution based on the accepted answer: mongodbutil.js: var MongoClient = require( 'mongodb' ).MongoClient; var _db; module.exports = { connectToServer: function( callback ) { MongoClient.connect( "<connection string>", function( err, client ) { _db = client.db("<database name>"); return callback( err ); } ); }, getDb: function() { return _db; } }; app.js: var createError = require('http-errors'); var express = require('express'); var path = require('path'); var cookieParser = require('cookie-parser'); var logger = require('morgan'); var app = express(); app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'ejs'); app.use(logger('dev')); app.use(express.json()); app.use(express.urlencoded({ extended: false })); app.use(cookieParser()); app.use(express.static(path.join(__dirname, 'public'))); var mongodbutil = require( './mongodbutil' ); mongodbutil.connectToServer( function( err ) { //app goes online once this callback occurs var indexRouter = require('./routes/index'); var usersRouter = require('./routes/users'); var companiesRouter = require('./routes/companies'); var activitiesRouter = require('./routes/activities'); var registerRouter = require('./routes/register'); app.use('/', indexRouter); app.use('/users', usersRouter); app.use('/companies', companiesRouter); app.use('/activities', activitiesRouter); app.use('/register', registerRouter); // catch 404 and forward to error handler app.use(function(req, res, next) { next(createError(404)); }); // error handler app.use(function(err, req, res, next) { res.locals.message = err.message; res.locals.error = req.app.get('env') === 'development' ? 
err : {}; res.status(err.status || 500); res.render('error'); }); //end of calback }); module.exports = app; activities.js -- a route: var express = require('express'); var router = express.Router(); var mongodbutil = require( '../mongodbutil' ); var db = mongodbutil.getDb(); router.get('/', (req, res, next) => { db.collection('activities').find().toArray((err, results) => { if (err) return console.log(err) res.render('activities', {activities: results, title: "Activities"}) }); }); router.post('/', (req, res) => { db.collection('activities').save(req.body, (err, result) => { if (err) return console.log(err) res.redirect('/activities') }) }); module.exports = router;
Here is my setup in 2020: ./utils/database.js const { MongoClient } = require('mongodb'); class Mongo { constructor () { this.client = new MongoClient("mongodb://127.0.0.1:27017/my-app", { useNewUrlParser: true, useUnifiedTopology: true }); } async main () { await this.client.connect(); console.log('Connected to MongoDB'); this.db = this.client.db(); } } module.exports = new Mongo(); /app.js const mongo = require('./utils/database'); const express = require('express'); const app = express(); const boot = async () => { await mongo.main(); app.listen(3000); }; boot();
go-oleg is basically right, but these days you (probably) don't want to use "mongodb" itself; rather, use some framework that will do a lot of the "dirty work" for you.
I´m late to the party, but hopefully this answer will help someone, this is a functional code: db.js const MongoClient = require("mongodb").MongoClient const urlMongo = "mongodb://localhost:27017" var db; function connectToServer( callback ) { MongoClient.connect(urlMongo, { useUnifiedTopology: true , useNewUrlParser: true }, function( err, client ) { db = client.db('auth'); return callback( err ); }) } function getDb() { return db } module.exports = {connectToServer, getDb} We export one function to connect to the mongo and another to get de instanceof the connection. app.js const express = require('express') const app = express() const mongo = require('./db.js'); mongo.connectToServer( function( err) { if (err) console.log(err); const auth = require('./modulos') app.post('/login', (req, res) => { auth.login(req, res)}) app.listen(3000, function () { console.log('Corriendo en puerto 3000')}) }); We must do the require of the auth module after we initiallize the connection, otherwise the getDb function will return undefined. module.js const db = require('../db.js').getDb() const usuariosCollection = db.collection('usuarios') function login(req, res){ usuariosCollection.find({ 'username': 'Fran' }).toArray(function (err, doc) { ... }) }
As this is tagged with Express, I thought I would mention that Express has a built in feature to share data between routes. There is an object called app.locals. We can attach properties to it and access it from inside our routes. You simply instantiate your mongo connection in your app.js file. var app = express(); MongoClient.connect('mongodb://localhost:27017/') .then(client =>{ const db = client.db('your-db'); const collection = db.collection('your-collection'); app.locals.collection = collection; }); // view engine setup app.set('views', path.join(__dirname, 'views')); This database connection can now be accessed within your routes as below without the need for creating and requiring additional modules. app.get('/', (req, res) => { const collection = req.app.locals.collection; collection.find({}).toArray() .then(response => res.status(200).json(response)) .catch(error => console.error(error)); }); This method ensures that you have a database connection open for the duration of your app unless you choose to close it at any time. It's easily accessible with req.app.locals.your-collection and doesn't require additional modules.
Initialize the connection as a promise: const MongoClient = require('mongodb').MongoClient const uri = 'mongodb://...' const client = new MongoClient(uri) const connection = client.connect() // initialized connection And then call the connection whenever you wish you perform an action on the database: // if I want to insert into the database... const connect = connection connect.then(() => { const doc = { id: 3 } const db = client.db('database_name') const coll = db.collection('collection_name') coll.insertOne(doc, (err, result) => { if(err) throw err }) })
Here's a suggestion using TypeScript and ES6 features and syntax: db.ts import { Db, MongoClient } from 'mongodb' let client: MongoClient let db: Db const connectToDatabase = async () => { client = new MongoClient('databaseURI') await client.connect() db = client.db('dbname') } export { connectToDatabase, client, db, } index.ts import express from 'express' import { someRouter } from './routes/someRoute' import { connectToDatabase } from './db' connectToDatabase().then(() => { const app = express() app.use('/someRoute', someRouter) const port = process.env.PORT || 5000 app.listen(port, () => { console.log(`Server is listening on port ${port}`) }) }) routes/someRoute.ts import express from 'express' import { db } from '../db' const someRouter = express.Router() someRouter.route('/') .get(async (req, res) => { const results = await db.collection('collectionName').find().toArray() return res.send(results) }) export { someRouter, }
we can create a dbconnection file like dbconnection.js const MongoClient = require('mongodb').MongoClient const mongo_url = process.env.MONGO_URL; module.exports = { connect: async function(callback) { var connection; await new Promise((resolve, reject) => { MongoClient.connect(mongo_url, { useNewUrlParser: true }, (err, database) => { if (err) reject(); else { connection = database; resolve(); } }); }); return connection; } }; and then use this file in the your app like var connection = require('../dbconnection'); and then use like this inside your async function db = await connection.connect(); hope this will work
I find this works well :) mongoUtil.ts import { MongoClient } from 'mongodb'; const uri = 'MONGOSTRING'; let connPoolPromise: any = null; const mongoPoolPromise = () => { if (connPoolPromise) return connPoolPromise; connPoolPromise = new Promise((resolve, reject) => { const conn = new MongoClient(uri, { useNewUrlParser: true, useUnifiedTopology: true, }); if (conn.isConnected()) { return resolve(conn); } else { conn .connect() .then(() => { return resolve(conn.db('DATABASENAME')); }) .catch(err => { console.log(err); reject(err); }); } }); return connPoolPromise; }; export = { mongoPoolPromise, }; anyFile.ts const { mongoPoolPromise } = require('./mongoUtil'); async function getProducts() { const db = await mongoPoolPromise(); const data = await db .collection('myCollection') .find({}) .toArray(); console.log(data); return data; } export { getProducts };
I'm a bit late for this, but I'll add my solution too. It's a much noobier approach compared to the answers here. Anyway if you are using MongoDB version 4.0 and Node.js 3.0 (or higher versions) you can use isConnected() function from the MongoClient. const MongoClient = require('mongodb').MongoClient; const uri = "<your connection url>"; const client = new MongoClient(uri, { useNewUrlParser: true }); if (client.isConnected()) { execute(); } else { client.connect().then(function () { execute(); }); } function execute() { // Do anything here // Ex: client.db("mydb").collection("mycol"); } This worked fine for me. Hope it helps.
Based on accepted answers, I use a simple approach. But use this only if you want to use db inside function which will be executed after some time. For ex: In express route functions, it is the easiest approach you can take. mongo.js const MongoClient = require("mongodb").MongoClient var db const connectDb = (callback) => { if (db) return callback() MongoClient.connect( uri, {ops}, (err, database) => { if (err) return console.log(err) db = database.db("dbName") console.log("Database Connected") callback() } ) } const getDb = (collectionToGet) => { return db.collection(collectionToGet) } module.exports = { connectDb, getDb, } Now, in other files where you want the db object, user.js const { connectDb, getDb } = require('mongo.js') var db // store db object in this object connectDb(() => ( db = getDb("user") )) app.get('/', (req, res) => { // do something with req db.insert({}) // do something with res }
If you opt for using mongoose in your application edit your app.js file with the following snippet app.js const mongoose = require('mongoose'); mongoose.connect('mongodb://localhost:27017/Your_Data_Base_Name', {useNewUrlParser:true}) .then((res) => { console.log(' ########### Connected to mongDB ###########'); }) .catch((err) => { console.log('Error in connecting to mongoDb' + err); });` Next Step: Define Models for your application require them and perform CRUD operation directly for example blogSchema.js const mongoose = require('mongoose'); const Schema = mongoose.Schema; const blogSchema = new Schema({ _id : mongoose.Schema.Types.ObjectId, title : { type : 'String', unique : true, required : true }, description : String, comments : [{type : mongoose.Schema.Types.ObjectId, ref: 'Comment'}] }); module.exports = mongoose.model('Blog', blogSchema); Usage createBlog.js const Blog = require('../models/blogSchema'); exports.createBlog = (req, res, next) => { const blog = new Blog({ _id : new mongoose.Types.ObjectId, title : req.body.title, description : req.body.description, }); blog.save((err, blog) => { if(err){ console.log('Server Error save fun failed'); res.status(500).json({ msg : "Error occured on server side", err : err }) }else{ //do something.... } U don't need to connect to mogoDB always ....
var MongoClient = require('mongodb').MongoClient; var url = 'mongodb://localhost:27017/'; var Pro1; module.exports = { DBConnection:async function() { Pro1 = new Promise(async function(resolve,reject){ MongoClient.connect(url, { useNewUrlParser: true },function(err, db) { if (err) throw err; resolve(db); }); }); }, getDB:async function(Blockchain , Context) { bc = Blockchain; contx = Context; Pro1.then(function(_db) { var dbo = _db.db('dbname'); dbo.collection('collectionname').find().limit(1).skip(0).toArray(function(err,result) { if (err) throw err; console.log(result); }); }); }, closeDB:async function() { Pro1.then(function(_db){ _db.close(); }); } };
const express = require('express') const server = express() const mongoClient = require('./MongoDB.js').client const port = 3000 ;(async () => { await mongoClient.connect() server.listen(port, () => console.log(`Server is listening on port ${port}!`)) })().catch(console.error)
You can use the Singleton Design Pattern to achive cross file usage of your MongoDB connection. Init.mjs /* ################ Controller ################ */ import ctrlLib from '../../controller/lib.mjs'; /* ################ MongoDB ################ */ import mongodb from 'mongodb'; /* ################ Logs ################ */ import log from 'fancy-log'; import chalk from 'chalk'; /** Init MongoDB connection */ export class Init { /** * Check if its first time usage of this class. If true set class instance to this that we always get same instance. * Then get MongoDB details from config.yml and set as global. * In the last step we return the class instance. */ constructor() { if (Init.instance == null) Init.instance = this; const config = ctrlLib.getConfig(); this.MongoURL = config.MongoDB.url; this.MongoName = config.MongoDB.dbname; ({MongoClient: this.MongoClient} = mongodb); return Init.instance; }; // constructor(){ /** Connect to Database and return connection */ async connect() { try { const client = await this.MongoClient.connect( this.MongoURL, {useNewUrlParser: true, useUnifiedTopology: true}, ); this.connection = {'db': client.db(this.MongoName), 'client': client}; return this.connection; } // try { catch (e) { log( `${chalk.red.bold('❌ ERROR')} while try to connect to MongoDB DB ${chalk.white.bold('Error:\n')} ${e}` ); } // catch (e) { }; // async connect() { /** * Return connection for cross file usage * #return {object} */ getConnection() {return this.connection;}; }; // export class Init { app.mjs Make sure to 1x time create your MongoDB connection anywhere inside of your project that you can use it later in other files. 
/* ################ Services ################ */ import {Init} from './Init.mjs'; (async ()=>{ await new Init().connect(); })().catch(e=>{log('app.mjs - Catch error: ' + e);}); anyOtherFile.mjs /* ################ Services ################ */ import {Init} from './Init.mjs'; /** Subclass of Search which contains lib functions */ class Lib { /** * Find data by using search query and return result. * #param {string} collection - Name of collection * #param {object} query - Search query */ async findOne(collection, query) { const connection = new Init().getConnection(); return await connection.db.collection(collection).findOne(query); }; // async findOne() { }; // class Lib {
Updated for 2022 MongoClient new updates MongoUtil.js (For database connection and return database instance) const { MongoClient } = require('mongodb'); const uri = "your database connection url"; var _db; module.exports = { connectToServer: function (callback) { MongoClient.connect(uri, { useNewUrlParser: true }, function (err, client) { _db = client.db('testdb'); return callback(err); }); }, getDb: function () { //this returns database instance return _db; } }; app.js (You can use in any routes or js by importing mongoUtil) var mongoUtil = require('./mongoUtil'); mongoUtil.connectToServer(function (err, client) { if (err) console.log(err); console.log(`server is running`); insertData(); //or do functions and db queries in any js }); async function insertData() { //Functions should be async var database = mongoUtil.getDb(); var movies = database.collection('movies'); const doc = { title: "Movie title", content: "Movie content", } const result = await movies.insertOne(doc); console.log(`A document was inserted with the _id: ${result.insertedId}`); }
I tried #go-oleg answer and it works pretty well. Inside getDb() , I make sure _db must be defined. And if not defined, I call the connectToServer() so that it will get defined again. After this I don't have to call connectToServer() in the app.js which makes my code clean. let getDb = async() => { if(_db) { return _db } else { _db = await connectToServer() return _db } } And then, I simply call getDb() everywhere. Also, What I observed, It takes about 64ms on first call. After first call it takes about, 2-6ms everytime. I answered here because i have less reputation to comment.
After a long effort, this is the method that finally worked for me. Please follow this link; it is also a good solution: https://mrvautin.com/re-use-mongodb-database-connection-in-routes/
Folks, in 2022 there is no need for reconnection logic, the Node.js MongoDB driver handles this all for you (v4+). You can simply connect as described in the official docs. Put this in a db.js file, then you can import client or db anywhere in your app: import { MongoClient, ServerApiVersion } from 'mongodb' const uri = `mongodb+srv://...`; // Create a new MongoClient export const client = new MongoClient(uri, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 }); export const db = client.db('your_db'); When making queries, use try/catch to capture potential connection errors. try { const res = await db.collection("testdata").insertOne({test: Math.random()}); console.log('Inserted', res); } catch(e) { console.error('MONGO ERROR', e); } AFAIK, the Mongo driver will keep retrying forever if the connection is lost. Try it yourself: put the above code in a setInterval and turn off your internet connection for a while then turn it back on, Mongo will automatically reconnect, even after hours of downtime. It will even submit some queries that were made while the connection was down.
Updated for 2023 MongoDB Connection const { MongoClient, ServerApiVersion } = require('mongodb'); const dbconfig = require('./config'); module.exports = { client: client = new MongoClient(dbconfig.uri, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 }), connectToDB: async () => { try { await client.connect() console.log('connected!') } catch (err) { console.log('Err', err) } } } In your controller const db = require('../config/mongodb.connection') const hha_data = db.client.db('hha-sit').collection('hnh-data') exports.addNewCustomer = async (req, res) => { try { await db.connectToDB() let result = await hha_data.findOne({}, { account_id: 'MDB829001337' }) console.log('result', result) } catch (err) { console.error('Connection Error !', err) } finally { await db.client.close() } res.send('Hi') } Please feel free to revise it if you have any suggestions. :)
This approach is correct, and it can be improved in the following ways: 1.Wrap the MongoClient connect function inside a module and export it as a singleton object to be used across your application. This way, you can make sure only one connection is established to the MongoDB server and is reused across your modules. 2.Add error handling to your code to handle potential issues like a connection failure. 3.Use the MongoDB native driver's connection pooling feature instead of maintaining a single connection throughout the application's lifetime, as this can lead to resource exhaustion and poor performance. This is an example of a improved implementation: const MongoClient = require('mongodb').MongoClient; let _db; const connectToDb = async (url) => { if (db) return db; let client; try { client = await MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true }); _db = client.db(); } catch (err) { console.error('Error connecting to MongoDB: ', err); process.exit(1); } return _db; }; module.exports = connectToDb; const connectToDb = require('./db'); const userModule = async (app) => { const db = await connectToDb('mongodb://localhost:27017/marankings'); return { addUser: () => console.log('add user'), getAll: () => 'all users' }; }; module.exports = userModule; const userModule = require('./userModule'); (async () => { const users = await userModule(); console.log(users.getAll()); })();