Mongoose query blocks Node.js - javascript

Mongoose blocks Node.js while it is fetching data. I thought it was supposed to be completely non-blocking, and that control would simply return to the callback once the data is ready.
The problem is with:
// Query every document in the collection and send the whole result set at once.
// NOTE(review): `err` is never checked — on failure `documents` is undefined.
Container.find({}, function (err, documents) {
res.status(200).send(documents);
});
When I hit this route in ExpressJS, it freezes Node.js for around 10 seconds, and no one else can open a connection during that time.
I open a connection to MongoDB at startup using Mongoose and do nothing else with it afterwards. What's the problem? Is it supposed to work like that?
UPDATE:
So this is how I init mongoose
// Runs once the models are registered: seed the database, then boot the app.
function initialDb() {
seed();
seedStructure();
startApplication();
}
// Open the MongoDB connection first, then register models and call initialDb.
database.connect();
database.loadModels(initialDb);
and this is the place where i connect and init models
import mongoose from 'mongoose';
import chalk from 'chalk';
import config from '../config';
export default {
// Register every Mongoose model by requiring the model modules for their
// side effects, then invoke `callback` if one was supplied.
loadModels(callback){
require('../../models/site');
require('../../models/page');
require('../../models/container');
require('../../models/template');
require('../../models/theme');
if (typeof callback === 'function') {
return callback();
}
},
// Open the shared Mongoose connection; failures are only logged, never thrown.
connect () {
mongoose.connect(config.db.uri, function (err) {
if (err) {
console.error(chalk.red('Could not connect to MongoDB!'));
console.log(err);
}
});
},
// Close the shared connection, log, and forward any error to `callback`.
disconnect(callback) {
mongoose.disconnect(function (err) {
console.info(chalk.yellow('Disconnected from MongoDB.'));
callback(err);
});
}
};
and the model
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
// Schema is left empty: documents are stored and read without declared fields.
var container = new Schema({
});
let model = mongoose.model('container', container);
module.exports = model;
It returns around 26k documents.

Ok so basically I found out, that if I will stream that instead of getting it with one callback it will work way better (I will be able to get into some other actions)
like this
// Stream the same query instead of materializing it in one callback; each
// 'data' event yields a single document, letting other work interleave.
var stream = Container.find({}).stream();
var array = [];
stream.on('data', (doc) => {
array.push(doc);
}).on('error', (err) => {
// NOTE(review): error is swallowed — the response is never sent on failure.
}).on('close', () => {
res.send(array);
});
That solves the problem. So this is how I would fetch large data sets from MongoDB — but why does it slow down so much when I fetch everything in a single callback? Is it because of the 12 MB of data? A big JSON payload that needs to be parsed, or what?
The reason for the slowdown is quite mysterious to me.

Related

Unable to export db properties from nodejs module

I am trying to export database properties, stored in a properties file, from a JavaScript module. By the time the properties file has been read, the module has already been exported, so the properties appear undefined wherever I use them in other modules.
const Pool = require('pg').Pool;
const fs = require('fs')
const path = require('path');
// Wraps a pg connection pool built from the supplied settings object.
class DbConfig {
constructor(dbData) {
// NOTE(review): as the answer below explains, getdbconf() returns nothing,
// so this constructor never receives the file's properties.
this.pool = new Pool({
user: dbData['user'],
host: dbData['host'],
database: dbData['database'],
password: dbData['password'],
max: 20,
port: 5432
});
}
}
function getdbconf() {
const dbData = {};
// NOTE(review): broken as posted (and dissected in the answer below): the
// ')' after the filename closes the readFile call early, and the `return`
// inside the callback returns to no caller.
fs.readFile("../../db_properties.txt"), 'utf8', (err, data) => {
if (err) {
console.error(err)
return
}
// dbData = {"user":"postgres", "password": "1234"...};
return dbData;
});
}
// getdbconf() returns undefined, so the pool is built without credentials.
let db = new DbConfig(getdbconf());
let dbPool = db.pool;
console.log("dbpool : -> : ",dbPool); // username and password appear undefined
module.exports = { dbPool };
Is there a way to read data before exporting data from Javascript module?
Usually database config or any other sensitive info is read from a .env file using dotenv .
Or
you could also provide env from command line itself like
DB_HOST=127.0.0.1 node index.js
inside your index.js
console.log(process.env.DB_HOST)
Please create a new file (connection-pool.js) and paste this code:
// connection-pool.js — build one shared pg Pool at module load and export it.
const { Pool } = require('pg');
const poolConnection = new Pool({
user: 'postgresUserName',
host: 'yourHost',
database: 'someNameDataBase',
password: 'postgresUserPassword',
port: 5432,
});
console.log('connectionOptions', poolConnection.options);
module.exports = poolConnection;
For use it, create a new file (demo-connection.js) and paste this code:
// demo-connection.js — exercise the shared pool with a trivial query.
const pool = require('./connection-pool');
pool.query('SELECT NOW();', (err, res) => {
if (err) {
// throw err;
console.log('connection error');
return;
}
if (res) {
console.log(res.rows);
pool.end(); // release the pool once the demo query has completed
}
});
This is an alternative option 🙂
Exporting the result of async calls
To export values which have been obtained asynchronously, export a Promise.
// Kick off the read at module load and export the pending Promise;
// consumers `await` it once (see the import example below).
const fs = require('fs/promises'); // `/promises` means no callbacks — a Promise is returned
// `readFile` returns a Promise (it resolves to a Buffer unless an encoding
// such as 'utf8' is passed). The stray extra ')' is removed so this parses.
const dbDataPromise = fs.readFile('fileToRead');
module.exports = dbDataPromise;
Importing
When you need to use the value,
// The module exports a Promise, so each consumer awaits it once.
const dbDataPromise = require('./dbdata');
// (method shorthand — assumed to sit inside a class or object literal)
async init() {
const dbData = await dbDataPromise;
}
//or without async, using Promise callbacks
init() {
dbDataPromise
.then(dbData => the rest of your code that depends on dbData here);
}
Current code broken
Please note that your current code, as pasted above, is broken:
// Verbatim re-quote of the asker's broken snippet, dissected just below.
function getdbconf() {
const dbData = {};
fs.readFile("../../db_properties.txt"), 'utf8', (err, data) => {
//[...] snipped for brevity
return dbData;
});
}
fs.readFile "returns" dbData, but there is nothing to return to, since you are in a callback which you did not call yourself. Function getdbconf returns nothing.
The line that says let db = new DbConfig(getdbconf()); will NOT work. It needs to be inside the callback.
The only way to avoid putting all of your code inside the callback (and "flatten" it) is to use await, or to use readFileSync
Avoiding the issue
Using environment variables
Suhas Nama's suggestion is a good one, and is common practice. Try putting the values you need in environment variables.
Using synchronous readFile
While using synchronous calls does block the event loop, it's ok to do during initialization, before your app is up and running.
This avoids the problem of having everything in a callback or having to export Promises, and is often the best solution.

Await isn't waiting for promise to resolve

Good evening all!
I have been stuck on this issue for a while and I can't seem to solve it through sheer Googling and so I am reaching out to you all.
Context:
I am writing a small application that handles all the calendars and basic project information for all the interns at our company because my boss is constantly asking me what they're up to and I wanted to give him something that he could look at, so I decided to solve it with code whilst also learning a new framework in the process(Express).
Right now I have my routes all set up, I have my controllers all set up, and I have my DB cursor all set up. When I make the call to the route I have defined, it runs the getAllUsers() controller function and inside that controller function it makes a call to the database using the getAllUsers() function on the DB cursor, I want the code to wait for the DB cursor to return its result before continuing but it isn't and I can't work out why. The DB cursor code does work because it fetches the data and logs it out fine.
Any help would be greatly appreciated, I have put the three bits of code in question below, let me know if you need me to show more.
p.s ignore the 'here1', 'here2', etc calls, this is how I have been working out what's happening at any point in time.
routes.ts
import express from 'express';
import controllers from './controller.js';
// Register all user-related REST routes against the Express app.
export default (app: express.Application) => {
// Users
app.route('/users').get(controllers.getAllUsers)
app.route('/users').post(controllers.postNewUser)
app.route('/users').delete(controllers.deleteUser)
app.route('/user/:emailAddress').get(controllers.getUser)
app.route('/user/:emailAddress').put(controllers.updateUser)
}
controllers.ts
import express from 'express';
import dbcursor from '../services/dbcursor.js';
// Interfaces
import { Project, User } from '../services/interfaces.js'
const controllers = {
// Users
// GET /users — awaits the cursor helper, replies 200 with the rows or 400
// with the error. NOTE(review): per the answer below, dbcursor.getAllUsers()
// resolves with undefined because it mixes a callback with await, so
// `dbRes` logs undefined at 'here 3'.
getAllUsers: async (req: express.Request, res: express.Response) => {
try {
const dbRes = await dbcursor.getAllUsers();
console.log('here 3', dbRes)
res.status(200).json({
message: 'Users fetched succesfully!',
dbRes: dbRes
});
} catch (err) {
res.status(400).json({
message: 'Failed to get users.',
dbRes: err
});
}
},
}
dbcursor.ts
import dotenv from 'dotenv';
import mongodb from 'mongodb'
dotenv.config();
// Interfaces
import { User, Project } from './interfaces'
// DB Client Creation
const { MongoClient } = mongodb;
const uri = process.env.DB_URI || '' // empty string when DB_URI is unset
const client = new MongoClient(uri, { useNewUrlParser: true, useUnifiedTopology: true });
const dbcursor = {
// Users
// NOTE(review): this mixes a callback with await — `client.connect(cb)`
// settles before the callback body finishes, and the `return dbRes` inside
// the callback does not return from getAllUsers. The outer async function
// therefore resolves with undefined (the answer below shows the fix).
getAllUsers: async () => {
let dbRes;
try {
await client.connect(async err => {
if (err) throw err;
console.log("here 1", dbRes)
const collection = client.db("InternManager").collection("Users");
dbRes = await collection.find().toArray()
console.log("here 2", dbRes)
return dbRes;
});
} catch(err: any) {
return err;
}
},
}
It's generally not a good idea to mix callbacks and promises. Try not passing a callback to the client.connect method, and you should be able to await the promise as expected
// Suggested fix: use the Promise API only — no callback passed to connect(),
// so every step can be awaited and the result actually returned.
getAllUsers: async () => {
let dbRes;
try {
await client.connect();
console.log("here 1", dbRes)
const collection = client.db("InternManager").collection("Users");
dbRes = await collection.find().toArray()
console.log("here 2", dbRes)
return dbRes;
} catch(err: any) {
throw err; // If you're just catching and throwing the error, then it would be okay to just ignore it
}
},

How should I handle a db connection in Javascript / AWS Lambda

In my JS Lambda function I have something along the lines of the following...
import utils from './utils'
index.handler() {
return utils.initDB()
.then(function() {
return utils.doSomething()
utils.js:
// Module-level singletons: set once by initDB(), then reused by every helper
// in this file (the question asks whether keeping them global is safe).
var dbConfig = null;
var knex = null;
function initDB() {
dbConfig = require('../db');
knex = require('knex')(dbConfig);
return;
}
Basically, how should I pass around the knex object? Is it okay to have as a global var in the utils file? Should I return it to the handler and pass it into every smsUtils.doX call? I'm thinking this might be causing problems with db connection/pooling, but I don't know how to find out.
For anyone who stumbles on this in the future (i.e. me when I'm googling how to do this again in a year):
http://blog.rowanudell.com/database-connections-in-lambda/ explains connection reuse in Lambda. Should look something like this:
// Client is created and connected once, outside the handler, so warm Lambda
// invocations reuse the same connection (per the linked blog post above).
const pg = require('pg');
const client = new pg.Client('postgres://myrds:5432/dbname');
client.connect();
exports.handler = (event, context, cb) => {
client.query('SELECT * FROM users WHERE ', (err, users) => {
// Do stuff with users
cb(null); // Finish the function cleanly
});
};

How to get Mongoose to list all documents in the collection? To tell if the collection is empty?

I'm using a MEAN stack and writing these methods in Mongoose. I'm wondering what's wrong with what I put in the Mongoose model file. I would like to use Mongoose to simply print out a list all the documents in the myModel collection.
// NOTE(review): find() is asynchronous, so this.save(cb) runs before the
// query callback fires; results are only logged, never passed to cb. The
// answer below also notes the 'myField' projection argument is unnecessary.
myModel.methods.myMethod = function(cb){
this.model("Bids").find({}, 'myField', function(err, results){
if (err){console.log(err);return err;}
console.log("okay");
console.log(results);
})
this.save(cb);
}
Also, what is the code that I can write in Mongoose to tell if the myModel collection is empty or not?
It's better to teach a man how to fish than to give him a fish ...
So it would be extremely helpful if you can suggest what debugging tools I can install, such as an Express middleware, that can help me debug myself. Please post your debugging suggestions here.
I'm assuming every other setup required for mongoose is correct.
At the line below, I think 'myField' is not needed.
this.model("Bids").find({}, 'myField', function(err, results)
Here is something more from scratch, maybe it would help you to trace-back you steps:
var mongoose = require('mongoose');
var assert = require('assert'); // was missing: `assert` is used below but never required
//connection to Mongodb instance running on=======
//local machine or anywhere=========================
var uri = 'mongodb://localhost:27017/test';
var connection = mongoose.createConnection(uri);
//Define Schema==================================
var Schema = mongoose.Schema;
var BlogPostSchema = new Schema({
author: { type: Schema.Types.ObjectId },
title: String,
body: String
});
//Create model===================================================
var BlogPostModel = connection.model('BlogPost', BlogPostSchema);
//function to insert doc into model NOTE "pass in your =======
//callback or do away with it if you don't need one"=========
var insertBlogPost = function (doc, callback) {
//here is where our doc is converted to a mongoose object
var newblogPost = new BlogPostModel(doc);
//save to db
newblogPost.save(function (err) {
assert.equal(null, err);
console.log("saved successfully");
//invoke your callback if any — guarded so it really is optional,
//as the comment above promises
if (typeof callback === 'function') {
callback();
}
});
};
//function to get all BlogPosts====================================
var getAllBlogPosts = function (callback) {
//mongoose get all docs. I think here answers your question directly
BlogPostModel.find(function (err, results) {
assert.equal(null, err);
//invoke callback with your mongoose returned result
callback(results);
});
};
//you can add as many functions as you need.
//Put all of your methods in a single object interface
//and expose this object using module.
var BlogPostManager = {
insertBlogPost: insertBlogPost,
getAllBlogPosts : getAllBlogPosts
}
module.exports = BlogPostManager;

mongodb with typescript `toArray` method not working

I'm using Visual Studio with TypeScript. Everything in this code compiles. According to the MongoDB docs I'm using toArray correctly. I'm a little new to TypeScript, so I don't know whether this is a TypeScript issue or a MongoDB one. The tests variable seems to have a toArray method, but when I call it nothing is returned — the console.log call isn't even run. According to the docs and the TypeScript samples this is the correct way to do it. Can anyone point out any errors in my code, or show the "correct" way to do this?
///<reference path="c:\DefinitelyTyped\mongodb\mongodb.d.ts"/>
import mongodb = require("mongodb")
// Connect to local MongoDB. NOTE(review): db.open is asynchronous and its
// callback is empty, so queries issued elsewhere may run before the
// connection is ready (the answer below moves all work into open's callback).
var server = new mongodb.Server('localhost',27017, { auto_reconnect: true})
var db = new mongodb.Db('test', server, { w: 1 });
export interface Test {
_id: mongodb.ObjectID;
a: number;
}
db.open(function () { });
// Fetch the 'test' collection and pass all of its docs to `callback`.
export function getTest(callback: (test: any) => void): void {
db.collection('test', function (err, test_collection) {
// test_collection.find().toArray -- this doesn't work either
test_collection.find(function (err, tests) {
console.log(tests, 'from getTest') // log's an object with `toArray` method
tests.toArray(function (err, docs) { // nothing returned. Seems like the callback isn't ran
if (err) { console.log(err) }
console.log(docs, 'from toArray')
callback(docs)
})
})
})
}
You problem seems to be not placing your function within the db.open method's callback in general:
// Working version: every DB operation is issued inside db.open's callback,
// guaranteeing the connection exists before the query runs.
var mongodb = require("mongodb");
var server = new mongodb.Server('localhost', 27017, { auto_reconnect: true });
var db = new mongodb.Db('test', server, { w: 1 });
db.open(function() {
db.createCollection('test', function(err, collection) {
collection.find().toArray(function(err,docs) {
console.log( docs );
});
});
});
You generally need to make sure a connection is open before doing anything

Categories

Resources