How to rerun the request handler in node + express? - javascript

I am running node + express + mongojs. Here is some sample code:
function mongoCallback(req, res) {
    "use strict";
    return function (err, o) {
        if (err) {
            res.send(500, err.message);
        } else if (!o) {
            res.send(404);
        } else {
            res.send(o);
        }
    };
}

var express, app, params, mongo, db;

express = require('express');
params = require('express-params');
app = express();
params.extend(app);
app.use("/", express.static('web'));

mongo = require('mongojs');
db = mongo.connect('mydb', ['inventory']);

app.get('/api/inventory', function (req, res) {
    db.inventory.find(mongoCallback(req, res));
});

app.listen(8000);
console.log('Listening on port 8000');
Sometimes I forget to run mongod, and an attempt to talk to the database fails with a "failed to connect to ..." error. The problem is that starting mongod afterwards is not enough: the already existing db object seems to remember that no connection could be made, so the server keeps failing even once mongod is running.
So, I have come up with the following solution:
var express, app, params, mongo, db, api;

if (!String.prototype.startsWith) {
    String.prototype.startsWith = function (str) {
        "use strict";
        return this.lastIndexOf(str, 0) === 0;
    };
}

function setDB() {
    db = mongo.connect('IF', ['invoices', 'const', 'inventory']);
}

function mongoCallback(req, res, next, caller, secondTry) {
    return function (err, o) {
        if (err) {
            if (!secondTry && err.message && err.message.startsWith("failed to connect to")) {
                setDB();
                caller(req, res, next, true);
            } else {
                res.send(500, err.message);
            }
        } else if (!o) {
            res.send(404);
        } else {
            res.send(o);
        }
    };
}

express = require('express');
params = require('express-params');
app = express();
params.extend(app);
app.use("/", express["static"]('web'));

mongo = require('mongojs');
setDB();

api = {
    getInventory: function (req, res, next, secondTry) {
        db.inventory.find(mongoCallback(req, res, next, api.getInventory, secondTry));
    }
};

app.get('/api/inventory', api.getInventory);

app.listen(8000);
console.log('Listening on port 8000');
Basically it recreates the db object if a request fails with the "failed to connect to" error and reruns the request. This is done only for the first failure. A subsequent failure returns the error.
I do not like my solution at all. There has to be a better way. Any suggestions?
Thanks.

What do you mean by "the already existing db object seems to remember that no connection could be made"? Do you mean that the queries on the database fail if you start the express app before running mongod? Since you are connecting to the DB at the startup of the express app, you should have the mongod running first.
If you are worried about the database going down after the initial connection and causing your CRUD operations to fail, you can check for an error in your operations
db.inventory.find(function(err, docs) {
    // check err to see if there was a connection issue
});
and then reconnect if there was an error.
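For example, here's a minimal sketch of that idea using the mongojs setup from the question (the 'mydb' name, the collection, and the error-message check are all taken from the question; the 503 response is just one reasonable choice):
app.get('/api/inventory', function (req, res) {
    db.inventory.find(function (err, docs) {
        if (err && err.message && err.message.indexOf('failed to connect to') === 0) {
            // recreate the mongojs handle so the next attempt can succeed
            db = mongo.connect('mydb', ['inventory']);
            return res.send(503, 'Database temporarily unavailable, please retry');
        }
        if (err) {
            return res.send(500, err.message);
        }
        res.send(docs);
    });
});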

As far as I know, the MongoDB native driver lets you set { auto_reconnect: true }; have you tried setting this?
I'm not sure how it behaves if the database wasn't running at all. Mongoose.js, for example, caches all requests until the DB is ready and issues them after a successful connection.
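For reference, a rough sketch of how that option was passed to the legacy native driver (the option shape matches the pooling example further down this page; whether mongojs forwards these options depends on the mongojs version, so treat this as an assumption to verify):
var MongoClient = require('mongodb').MongoClient;

MongoClient.connect('mongodb://localhost:27017/mydb', {
    server: { auto_reconnect: true, poolSize: 10 }
}, function (err, db) {
    if (err) throw err;
    // keep and reuse this single db object for all requests
});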

Related

Having difficulty returning a value from an async function

I'm new to Node and have previously just written Javascript for simple browser extensions. What I'm trying to do is run a shell script and then return the text for it on a get request (simplified version).
I've tried looking at callbacks and can't seem to get my head around them, or even adapt another example to what I'm trying to do. My main problem is that I'm receiving either the error "first argument must be one of type string or buffer. received type undefined" or "received type function" (when I tried to implement a callback, which is what I believe I need to do here).
I've looked at a few examples of callbacks and promises, and seeing them in the abstract (or in other contexts) just isn't making sense to me, so I was hoping someone could point me in the right direction.
The code is very crude, but just trying to get some basic functionality before expanding it any further.
var express = require("express");
var app = express();
const { exec } = require("child_process");
var ifcfg = function(callback) {
exec("ifconfig", (error, stdout, stderr) => {
if (error) {
console.log(`error: ${error.message}`);
return error;
}
if (stderr) {
console.log(`stderr: ${stderr}`);
return err;
} else {
var output = stdout.toString();
return callback(output);
}
});
}
app.get("/ifconfig", (req, res) => res.write(ifcfg(data)));
var port = process.env.PORT || 8080;
app.listen(port, function() {
console.log("Listening on " + port);
});
In JavaScript, a callback is a function passed into another function as an argument to be executed later.
Since the command is executed asynchronously you will want to use a callback to handle the return value once the command has finished executing:
var express = require("express");
var app = express();
const { exec } = require("child_process");
function os_func() {
this.execCommand = function(cmd, callback) {
exec(cmd, (error, stdout, stderr) => {
if (error) {
console.error(`exec error: ${error}`);
return;
}
callback(stdout);
});
}
}
app.get("/", (req, res) => {
console.log("InsideGetss");
var os = new os_func();
os.execCommand('ifconfig', function (returnvalue) {
res.end(returnvalue)
});
});
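As an alternative sketch of the same idea, you could promisify exec with util.promisify (the same approach used in an answer to the next question below) and use async/await instead of a hand-rolled callback:
const express = require("express");
const util = require("util");
const exec = util.promisify(require("child_process").exec);
const app = express();

app.get("/ifconfig", async (req, res) => {
    try {
        // exec resolves with { stdout, stderr } once the command has finished
        const { stdout } = await exec("ifconfig");
        res.send(stdout);
    } catch (err) {
        res.status(500).send(err.message);
    }
});

app.listen(process.env.PORT || 8080);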

kill child process exec

I need your help.
I want to get the public IP address of my Beaglebone via ifconfig.me.
If I have an existing internet connection it works fine. If I don't have an internet connection the request should be aborted.
Here is my code:
function publicIP_www(callback){
    try{
        exec('curl ifconfig.me', {timeout:3000}, function(error, stdout, stderr){
            callback(stdout);
        });
    }
    catch (err){
        callback("000.000.000.000");
    }
}
The returned IP address is then displayed on a web site in the browser.
If there is no internet connection, the browser just keeps loading forever. It seems as if the exec call is never terminated.
I'm looking forward to your support and hope that someone can tell me what I'm doing wrong.
best regards Hans
It's difficult to say why it isn't working in your case without being able to see the rest of your code, but you can try the following, which works fine. Of course it's a bit dirty; it's just an example.
The following code is for the index.js file of a "Node.js Express App + Jade" project created from the template in the WebStorm IDE.
const util = require('util');
const exec = util.promisify(require('child_process').exec);
....
....
router.get('/', async function(req, res, next) {
    try {
        const {stdout, stderr} = await exec('curl ifconfig.me');
        res.render('index', { title: stdout });
    }
    catch (err) {
        res.render('index', { title: "000.000.000.000" });
    }
});
OR use
const util = require('util');
const exec = require('child_process').exec;

function publicIP_www(callback){
    exec('curl ifconfig.me', {timeout:3000}, function(error, stdout, stderr){
        if (error) {
            return callback("000.000.000.000");
        }
        callback(stdout);
    });
}

router.get('/', function(req, res, next) {
    publicIP_www((title) => {
        res.render('index', { title });
    })
});
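On the "kill" part of the title: child_process.exec already accepts a timeout option (and a killSignal option to pick the signal). When the timeout fires the child is killed and the callback receives an error, so the error branch above is what aborts the request. A minimal sketch, reusing the 3000 ms value from the question:
const { exec } = require('child_process');

exec('curl ifconfig.me', { timeout: 3000, killSignal: 'SIGKILL' }, (error, stdout) => {
    if (error) {
        // curl was killed after 3 s (or failed outright), e.g. no internet connection
        return console.log('no public IP available');
    }
    console.log('public IP:', stdout.trim());
});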

mongodb: cannot define variable [duplicate]

I'm using the node-mongodb-native driver with MongoDB to write a website.
I have some questions about how to manage connections:
Is it enough to use only one MongoDB connection for all requests? Are there any performance issues? If not, can I set up a global connection to use in the whole application?
If not, is it good to open a new connection when a request arrives, and close it when the request has been handled? Is it expensive to open and close a connection?
Should I use a global connection pool? I hear the driver has a native connection pool. Is it a good choice?
If I use a connection pool, how many connections should be used?
Are there other things I should notice?
The primary committer to node-mongodb-native says:
You open MongoClient.connect once when your app boots up and reuse the db object. It's not a singleton connection pool; each .connect creates a new connection pool.
So, to answer your question directly, reuse the db object that results from MongoClient.connect(). This gives you pooling, and will provide a noticeable speed increase as compared with opening/closing connections on each db action.
Open a new connection when the Node.js application starts, and reuse the existing db connection object:
/server.js
import express from 'express';
import Promise from 'bluebird';
import logger from 'winston';
import { MongoClient } from 'mongodb';
import config from './config';
import usersRestApi from './api/users';

const app = express();

app.use('/api/users', usersRestApi);

app.get('/', (req, res) => {
    res.send('Hello World');
});

// Create a MongoDB connection pool and start the application
// after the database connection is ready
MongoClient.connect(config.database.url, { promiseLibrary: Promise }, (err, db) => {
    if (err) {
        logger.warn(`Failed to connect to the database. ${err.stack}`);
    }
    app.locals.db = db;
    app.listen(config.port, () => {
        logger.info(`Node.js app is listening at http://localhost:${config.port}`);
    });
});
/api/users.js
import { Router } from 'express';
import { ObjectID } from 'mongodb';

const router = new Router();

router.get('/:id', async (req, res, next) => {
    try {
        const db = req.app.locals.db;
        const id = new ObjectID(req.params.id);
        const user = await db.collection('user').findOne({ _id: id }, {
            email: 1,
            firstName: 1,
            lastName: 1
        });
        if (user) {
            user.id = req.params.id;
            res.send(user);
        } else {
            res.sendStatus(404);
        }
    } catch (err) {
        next(err);
    }
});

export default router;
Source: How to Open Database Connections in a Node.js/Express App
Here is some code that will manage your MongoDB connections.
var MongoClient = require('mongodb').MongoClient;
var url = require("../config.json")["MongoDBURL"]

var option = {
    db: {
        numberOfRetries: 5
    },
    server: {
        auto_reconnect: true,
        poolSize: 40,
        socketOptions: {
            connectTimeoutMS: 500
        }
    },
    replSet: {},
    mongos: {}
};

function MongoPool(){}

var p_db;

function initPool(cb){
    MongoClient.connect(url, option, function(err, db) {
        if (err) throw err;
        p_db = db;
        if(cb && typeof(cb) == 'function')
            cb(p_db);
    });
    return MongoPool;
}

MongoPool.initPool = initPool;

function getInstance(cb){
    if(!p_db){
        initPool(cb)
    } else {
        if(cb && typeof(cb) == 'function')
            cb(p_db);
    }
}

MongoPool.getInstance = getInstance;

module.exports = MongoPool;
When you start the server, call initPool
require("mongo-pool").initPool();
Then in any other module you can do the following:
var MongoPool = require("mongo-pool");
MongoPool.getInstance(function (db){
// Query your MongoDB database.
});
This is based on MongoDB documentation. Take a look at it.
Manage mongo connection pools in a single self-contained module. This approach provides two benefits. Firstly, it keeps your code modular and easier to test. Secondly, you're not forced to mix your database connection into your request object, which is NOT the place for a database connection object. (Given the nature of JavaScript I would consider it highly dangerous to mix anything into an object constructed by library code.) So with that, you only need to consider a module that exports two methods: connect = () => Promise and get = () => dbConnectionObject.
With such a module you can firstly connect to the database
// runs in boot.js or whatever file your application starts with
const db = require('./myAwesomeDbModule');

db.connect()
    .then(() => console.log('database connected'))
    .then(() => bootMyApplication())
    .catch((e) => {
        console.error(e);
        // Always hard exit on a database connection error
        process.exit(1);
    });
While your app is running, it can simply call get() when it needs a DB connection.
const db = require('./myAwesomeDbModule');
db.get().find(...)... // I have excluded code here to keep the example simple
If you set up your db module in the same way as the following, not only will you have a way to ensure that your application will not boot unless you have a database connection, you will also have a global way of accessing your database connection pool that throws an error if you have not yet got a connection.
// myAwesomeDbModule.js
const { MongoClient } = require('mongodb');
// url and option are assumed to come from your own configuration

let connection = null;

module.exports.connect = () => new Promise((resolve, reject) => {
    MongoClient.connect(url, option, function (err, db) {
        if (err) { reject(err); return; }
        connection = db; // cache the connection before resolving
        resolve(db);
    });
});

module.exports.get = () => {
    if (!connection) {
        throw new Error('Call connect first!');
    }
    return connection;
};
If you have Express.js, you can use express-mongo-db for caching and sharing the MongoDB connection between requests without a pool (since the accepted answer says it is the right way to share the connection).
If not - you can look at its source code and use it in another framework.
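If I remember the package's README correctly, usage looks roughly like the sketch below (the middleware connects once and attaches the shared db handle to each request as req.db); double-check against the package documentation before relying on it:
var express = require('express');
var expressMongoDb = require('express-mongo-db');

var app = express();
app.use(expressMongoDb('mongodb://localhost:27017/test'));

app.get('/users', function (req, res, next) {
    // req.db is the cached connection provided by the middleware
    req.db.collection('users').find().toArray(function (err, users) {
        if (err) { return next(err); }
        res.send(users);
    });
});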
You should create the connection as a service, then reuse it whenever you need it.
// db.service.js
import { MongoClient } from "mongodb";
import database from "../config/database";

const dbService = {
    db: undefined,
    connect: callback => {
        MongoClient.connect(database.uri, function (err, data) {
            if (err) {
                // report the failure and stop; don't fall through to the success path
                return callback(err);
            }
            dbService.db = data;
            console.log("Connected to database");
            callback(null);
        });
    }
};

export default dbService;
my App.js sample
// App Start
dbService.connect(err => {
    if (err) {
        console.log("Error: ", err);
        process.exit(1);
    }
    server.listen(config.port, () => {
        console.log(`API running at ${config.port}`);
    });
});
and use it wherever you want with
import dbService from "db.service.js"
const db = dbService.db
I have been using generic-pool with Redis connections in my app; I highly recommend it. It's generic and I definitely know it works with MySQL, so I don't think you'll have any problems with it and Mongo.
https://github.com/coopernurse/node-pool
I have implemented the code below in my project to set up connection pooling; it creates a minimum number of connections and reuses the available ones.
/* Mongo.js */
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/yourdatabasename";
var assert = require('assert');

var connection = [];

// Create the database connection
establishConnection = function (callback) {
    MongoClient.connect(url, { poolSize: 10 }, function (err, db) {
        assert.equal(null, err);
        connection = db;
        // only invoke the callback if one was actually passed in
        if (typeof callback === 'function')
            callback(connection);
    });
}

function getconnection() {
    return connection;
}

module.exports = {
    establishConnection: establishConnection,
    getconnection: getconnection
}

/* app.js */
// establish one connection which all other routes will use
var db = require('./routes/mongo')
db.establishConnection();

// you can also pass a callback if you want to create a collection at startup
/*
db.establishConnection(function (conn) {
    conn.createCollection("collectionName", function (err, res) {
        if (err) throw err;
        console.log("Collection created!");
    });
});
*/

// any other route.js
var db = require('./mongo')
router.get('/', function (req, res, next) {
    var connection = db.getconnection()
    res.send("Hello");
});
If you are using Express there is another, more straightforward method, which is to utilise Express's built-in feature for sharing data between routes and modules within your app: an object called app.locals. We can attach properties to it and access it from inside our routes. To use it, instantiate your Mongo connection in your app.js file.
var app = express();

MongoClient.connect('mongodb://localhost:27017/')
    .then(client => {
        const db = client.db('your-db');
        const collection = db.collection('your-collection');
        app.locals.collection = collection;
    });

// view engine setup
app.set('views', path.join(__dirname, 'views'));
This database connection, or indeed any other data you wish to share among the modules of your app, can now be accessed within your routes with req.app.locals as below, without the need to create and require additional modules.
app.get('/', (req, res) => {
    const collection = req.app.locals.collection;
    collection.find({}).toArray()
        .then(response => res.status(200).json(response))
        .catch(error => console.error(error));
});
This method ensures that you have a database connection open for the duration of your app unless you choose to close it at any time. It's easily accessible with req.app.locals.your-collection and doesn't require creation of any additional modules.
The best approach to implementing connection pooling is to create one global array variable which holds the db name along with the connection object returned by MongoClient, and then reuse that connection whenever you need to contact the database.
In your server.js define global.dbconnections = [];
Create a service named connectionService.js. It will have 2 methods: getConnection and createConnection.
So when the user calls getConnection(), it will look for the details in the global connection variable and return the connection details if they already exist; otherwise it will call createConnection() and return the connection details.
Call this service with a <db_name> and it will return the connection object if it already has one; otherwise it will create a new connection and return it to you.
Hope it helps :)
Here is the connectionService.js code:
var mongo = require('mongoskin');
var mongodb = require('mongodb');
var Q = require('q');

var service = {};
service.getConnection = getConnection;
module.exports = service;

function getConnection(appDB) {
    var deferred = Q.defer();
    var connectionDetails = global.dbconnections.find(item => item.appDB == appDB)

    if (connectionDetails) {
        deferred.resolve(connectionDetails.connection);
    } else {
        createConnection(appDB).then(function (connectionDetails) {
            deferred.resolve(connectionDetails);
        })
    }

    return deferred.promise;
}

function createConnection(appDB) {
    var deferred = Q.defer();

    // connectionServer holds the base connection string and is assumed to be defined elsewhere
    mongodb.MongoClient.connect(connectionServer + appDB, (err, database) => {
        if (err) deferred.reject(err.name + ': ' + err.message);
        global.dbconnections.push({ appDB: appDB, connection: database });
        deferred.resolve(database);
    })

    return deferred.promise;
}
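A usage sketch under those assumptions (the 'inventoryDB' and 'items' names are just examples):
var connectionService = require('./connectionService');

connectionService.getConnection('inventoryDB').then(function (db) {
    // the same connection object is returned on every subsequent call
    db.collection('items').find().toArray(function (err, items) {
        if (err) { return console.error(err); }
        console.log(items);
    });
});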
In case anyone wants something that works in 2021 with TypeScript, here's what I'm using:
import { MongoClient, Collection } from "mongodb";
const FILE_DB_HOST = process.env.FILE_DB_HOST as string;
const FILE_DB_DATABASE = process.env.FILE_DB_DATABASE as string;
const FILES_COLLECTION = process.env.FILES_COLLECTION as string;
if (!FILE_DB_HOST || !FILE_DB_DATABASE || !FILES_COLLECTION) {
throw "Missing FILE_DB_HOST, FILE_DB_DATABASE, or FILES_COLLECTION environment variables.";
}
const client = new MongoClient(FILE_DB_HOST, {
useNewUrlParser: true,
useUnifiedTopology: true,
});
class Mongoose {
static FilesCollection: Collection;
static async init() {
const connection = await client.connect();
const FileDB = connection.db(FILE_DB_DATABASE);
Mongoose.FilesCollection = FileDB.collection(FILES_COLLECTION);
}
}
Mongoose.init();
export default Mongoose;
I believe if a request occurs too soon (before Mongoose.init() has had time to finish), an error will be thrown, since Mongoose.FilesCollection will still be undefined.
import { Request, Response, NextFunction } from "express";
import Mongoose from "../../mongoose";
export default async function GetFile(req: Request, res: Response, next: NextFunction) {
const files = Mongoose.FilesCollection;
const file = await files.findOne({ fileName: "hello" });
res.send(file);
}
For example, if you call files.findOne({ ... }) and Mongoose.FilesCollection is undefined, then you will get an error.
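One way to avoid that race, offered here only as a sketch on top of the answer above, is to start listening only after init() has resolved:
import express from "express";
import Mongoose from "./mongoose";

const app = express();
// ... register routes here ...

// assumes the module-level Mongoose.init() call has been removed, so init runs exactly once here
Mongoose.init()
    .then(() => app.listen(3000, () => console.log("Listening on 3000")))
    .catch((err) => {
        console.error("Failed to connect to MongoDB", err);
        process.exit(1);
    });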
npm i express mongoose
mongodb.js
const express = require('express');
const mongoose = require('mongoose');
const app = express();

mongoose.set('strictQuery', true);

mongoose.connect('mongodb://localhost:27017/db_name', {
    useNewUrlParser: true,
    useUnifiedTopology: true
})
    .then(() => console.log('MongoDB Connected...'))
    .catch((err) => console.log(err))

app.listen(3000, () => { console.log("Started on port 3000 !!!") })
node mongodb.js
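From there, a minimal sketch of defining a model and using it in a route (the User schema is just an example); Mongoose buffers queries until the connection is ready, so the route itself does not need to wait for connect():
const User = mongoose.model('User', new mongoose.Schema({
    name: String,
    email: String
}));

app.get('/users', async (req, res) => {
    try {
        const users = await User.find();
        res.json(users);
    } catch (err) {
        res.status(500).json({ error: err.message });
    }
});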
Using the method below you can easily manage as many connections as you need.
var mongoose = require('mongoose');
// `options` is assumed to be defined elsewhere with your mongoose connection options

//Set up default mongoose connection
const bankDB = () => {
    return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
}

bankDB().then(() => console.log('Connected to mongoDB-Atlas bankApp...'))
    .catch((err) => console.error('Could not connect to mongoDB', err));

//Set up second mongoose connection
const myDB = () => {
    return mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);
}

myDB().then(() => console.log('Connected to mongoDB-Atlas connection 2...'))
    .catch((err) => console.error('Could not connect to mongoDB', err));

// export the factories; call them once and reuse the returned connection
module.exports = { bankDB, myDB };
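A sketch of how you might then bind models to a specific connection (the file path and the Account schema are hypothetical); with createConnection you register models on the connection object itself rather than on mongoose:
const mongoose = require('mongoose');
const { bankDB } = require('./connections'); // hypothetical path to the module above

// call the factory once and reuse the returned connection
const conn = bankDB();

const Account = conn.model('Account', new mongoose.Schema({
    owner: String,
    balance: Number
}));

// Account now reads and writes through the bankDB connection
Account.find().then(accounts => console.log(accounts));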

How to have express handle and capture my errors

var database = require('database');
var express = require('express');
var app = express();
var cors = require('cors');
app.use(cors());
var bodyParser = require('body-parser');
var urlencodedParser = bodyParser.urlencoded({
    extended: false
});

app.post('/dosomething', urlencodedParser, function(req, res) {
    if (!req.body.a) {
        res.status(500).send(JSON.stringify({
            error: 'a not defined'
        }));
        return;
    }
    firstAsyncFunction(req.body.a, function(err, result) {
        if (err) {
            res.status(500).send('firstAsyncFunction was NOT a success!');
        } else {
            if (result.b) {
                secondAsyncFunction(result.b, function(err, data) {
                    if (err) {
                        res.status(500).send('secondAsyncFunction was NOT a success!');
                        return;
                    }
                    res.send('EVERYTHING WAS A SUCCESS! ' + data);
                });
            } else {
                res.status(500).send('result.b is not defined');
            }
        }
    });
});

function firstAsyncFunction(param, callback) {
    // Some network call:
    // Return either return (callback(null,'success')); or return (callback('error'));
    var query = database.createQuery(someOptionsHere);
    database.runDatabaseQuery(query, function(err, entities, info) {
        if (err) {
            return (callback('error'));
        }
        return (callback(null, 'success'));
    });
};

function secondAsyncFunction(param, callback) {
    // Some network call:
    // Return either return (callback(null,'success')); or return (callback('error'));
    var query = database.createQuery(someOptionsHere);
    database.runDatabaseQuery(query, function(err, entities, info) {
        if (err) {
            return (callback('error'));
        }
        return (callback(null, 'success'));
    });
};

var server = app.listen(process.env.PORT || 3000, function() {
    var host = server.address().address;
    var port = server.address().port;
    console.log('App listening at http://%s:%s', host, port);
});

module.exports = app;
I have here a basic express http server. This server has one route, dosomething, which makes two network calls and tells the user if they were a success or not.
This is my entire webserver (this is a bare bones server of my actual server for example purposes). I am now concerned with this server crashing. Reading the docs for express I see there is a default error handler which will catch errors and prevent the server from crashing (http://expressjs.com/en/guide/error-handling.html). I have added the code:
function defaultErrorHandler(err, req, res, next) {
    if (res.headersSent) {
        return next(err);
    }
    res.status(500);
    res.render('error', { error: err });
}

app.use(defaultErrorHandler);
This still crashes my server though. For example, I had a problem with my database returning improper JSON. Inside my firstAsyncFunction (not shown in the code) I tried to parse that JSON, which caused an error telling me it was improper JSON, and the server crashed and was unable to take requests anymore until I restarted it. I would like to avoid this and have the default error handler send a generic response back to the user when this occurs. I thought that if I specified the defaultErrorHandler and put it inside of app.use it would capture and handle all errors, but this does not seem to be the case? Inside of my async function, for example, you can see I am checking whether an error was returned, and if it was I send an error back to the user. But what if some other error occurs; how can I get Express to capture and handle this error for me?
The defaultErrorHandler cannot handle exceptions that are thrown inside asynchronous tasks, such as callbacks.
If you define a route like:
app.get('/a', function(req, res) {
    throw new Error('Test');
});
An error will be thrown, and in this case defaultErrorHandler will successfully catch it.
If the same exception occurs in an async manner, like so:
app.get('/a', function(req, res) {
    setTimeout(function () {
        throw new Error('Test');
    }, 1000);
});
The server will crash, because the callback actually runs in another context, and exceptions thrown by it will not be caught by the original catcher. This is a very difficult issue to deal with when it comes to callbacks.
There is more than one solution, though. One possibility is to wrap every function that is prone to throwing errors in a try/catch statement, but this is a bit excessive.
For example:
app.get('/a', function(req, res) {
    setTimeout(function () {
        try {
            var x = JSON.parse('{');
        }
        catch (err) {
            res.send(err.message);
        }
    }, 1000);
});
A nicer solution:
A nicer solution would be to use promises instead, if possible. Then, for example, you can declare a single errorHandler function like so:
function errorHandler(error, res) {
    res.send(error.message);
}
Then, let's say you have the following function, which fetches stuff from the database (I used setTimeout to simulate async behavior):
function getStuffFromDb() {
    return new Promise(function (resolve, reject) {
        setTimeout(function () {
            resolve("{");
        }, 100);
    });
}
Notice that this function returns an invalid JSON string. Your route will look something like:
app.get('/a', function(req, res) {
    getStuffFromDb()
        .then(handleStuffFromDb)
        .catch(function (error) { errorHandler(error, res) });
});

function handleStuffFromDb(str) {
    return JSON.parse(str);
}
This is a very simplified example, but you can add a lot more functionality to it, and (at least theoretically) have a single catch statement which will prevent your server from crashing.
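As a further sketch in the same spirit (not part of the original answer): if the route bodies become async functions, a small wrapper can forward any thrown or rejected error to next(), so the defaultErrorHandler from the question gets a chance to respond instead of the process crashing. The promisified helpers below are hypothetical stand-ins for firstAsyncFunction and secondAsyncFunction:
// wrap an async route handler so rejections are passed to Express via next()
const wrap = (fn) => (req, res, next) =>
    Promise.resolve(fn(req, res, next)).catch(next);

app.post('/dosomething', urlencodedParser, wrap(async (req, res) => {
    const result = await firstAsyncFunctionPromise(req.body.a); // hypothetical promisified version
    const data = await secondAsyncFunctionPromise(result.b);    // hypothetical promisified version
    res.send('EVERYTHING WAS A SUCCESS! ' + data);
}));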

How to export an object that only becomes available in an async callback?

I have a db.js file in which I set up a MongoDB connection. I would like to export the database object into my main app.js file:
// db.js
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
    module.exports = db;
});

// app.js
var db = require('./db');

app.get('/', function (req, res) {
    db.collection(/* … */); // throws error
});
The error is:
TypeError: Object #<Object> has no method 'collection'
So, how can I export the db object properly?
The best option, as suggested in the comments by elclanrs, is to export a promise:
// database.js
var MongoClient = require('mongodb').MongoClient,
    Q = require('q'),
    connect = Q.nbind(MongoClient.connect, MongoClient);

var promise = connect(/* url */);

module.exports = {
    connect: function () {
        return promise;
    }
}

// app.js
var database = require('./database');

database.connect()
    .then(function (db) {
        app.get('/', function (req, res) {
            db.collection(/* … */);
        });
    })
    .catch(function (err) {
        console.log('Error connecting to DB:', err);
    })
    .done();
(I'm using awesome Q library here.)
Below's the old version of my answer, left for the sake of history (but if you don't want to use promises, instead of going that road, you should use Matt's answer).
Its downside is that it will open a connection each time you require('database.js') (bummer!)
// DO NOT USE: left for the sake of history
// database.js
var MongoClient = require('mongodb').MongoClient;

function connect(cb) {
    MongoClient.connect(/* the URL */, cb);
}

module.exports = {
    connect: connect
}

// app.js
var database = require('./database');

database.connect(function (err, db) {
    app.get('/', function (req, res) {
        db.collection(/* … */);
    });
});
You can't do it as you want to do it, because, quoting the docs:
Note that assignment to module.exports must be done immediately. It cannot be done in any callbacks.
Instead, however, you can assign a property of module.exports in a callback, so this will work:
// db.js
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
    module.exports.instance = db;
});

// app.js
var db = require('./db');

// some time later (when `.instance` is available)
app.get('/', function (req, res) {
    db.instance.collection(/* … */);
});
However, the "some time later" part is a bit of a pain, so you may just want to use some sort of callback:
// db.js
var queue = [];
var instance = null;

require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
    instance = db;
    while (queue.length) {
        queue.pop()(instance);
    }
});

module.exports.done = function (callback) {
    if (instance === null) {
        queue.push(callback);
    } else {
        callback(instance);
    }
};

// app.js
require('./db').done(function (db) {
    app.get('/', function (req, res) {
        db.collection(/* … */);
    });
});
The above also handles cases where handlers via done() are attached after the connection has already been made.
Servers typically have 3 phases: init, serve and uninit. This seems obvious, but when you start writing servers from scratch (e.g. in Java you start by inheriting from HttpServlet) sometimes you forget how to do these things...
In the startup phase you must open the db connection (pool) and save the object somewhere (typically in your db.js module). Then in the serve phase, retrieve the MongoDB connection from db.js.
Related: How to get a instance of db from node-mongo native driver?
In your code:
// db.js
require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
    module.exports = db;
});

// app.js
var db = require('./db');

app.get('/', function (req, res) {
    db.collection(/* … */); // throws error
});
You've called connect in the db.js module, yet it's asynchronous.
The call to require in app.js is synchronous in behavior, though, so it will always receive the module's original (empty) exports object: the exports have not been reassigned by the time db.js finishes executing.
I'd suggest keeping things simple.
The option I usually use is something where the app code makes the connection and doesn't start listening for HTTP connections until it is complete. Then, I'll initialize each route file by calling a named method and pass the database connection to it.
Or, you could just always call connect in each module, yet cache the value. (The connect call would need to be called within the route callback code so that the routes were defined immediately and not when the connection was actually established).
// db.js
var _db = null;

// note: assign to module.exports (assigning to the bare `exports` variable
// would not change what require('./db') returns)
module.exports = function(callback) {
    if (!_db) {
        _db = {}; // only one connection, so we'll stop others from trying
        require('mongodb').MongoClient.connect(/* the URL */, function (err, db) {
            _db = db;
            callback(err, db);
        });
    } else {
        callback(null, _db);
    }
};

// app.js
var db = require('./db');

db(function(err, connection) {
    // store the connection value here and pass around, or ...
    // call this always in each file ...
});

// or ...
app.get('/', function (req, res) {
    db(function(err, connection) {
        connection.collection(/* … */);
    });
});
Or, you could use MongooseJS (a wrapper for the native NodeJS MongoDB driver) where commands, etc. are queued if the connection isn't available yet ....
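For completeness, a small sketch of that Mongoose behavior (the model and field names are just examples): queries issued before the connection is established are buffered and run once it is ready, so the "export something that isn't there yet" problem largely disappears:
// db.js
var mongoose = require('mongoose');
mongoose.connect(/* the URL */);

module.exports = mongoose.model('Item', new mongoose.Schema({ name: String }));

// app.js
var Item = require('./db');

app.get('/', function (req, res) {
    // safe even if the connection isn't ready yet; Mongoose buffers the query
    Item.find(function (err, items) {
        if (err) { return res.status(500).send(err.message); }
        res.send(items);
    });
});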
