I use mysql (without Sequelize) and generally create the connection in a module.exports function, which I then require from other files, like so:
var db;
module.exports = {
  getConnection: function () {
    return new Promise(function (resolve, reject) {
      // db = mysql connection stuff
      resolve(db);
    });
  }
  // other database-related operations similarly
};
Now I want to separate everything out. For example, I want a userModel that holds a few user-related database operations and just imports the db connection, and so on.
But what is the optimal way of achieving this? Thanks.
Since it's used so broadly and doesn't present a high risk of variable naming conflict, I prefer to add it to the global object. Then you never have to make sure it's being included, passed, or imported from a module.
Consider the following example:
// in your application initialization file such as app.js
// require items …
const mysql = require('mysql');
const connection = mysql.createPool({
  connectionLimit: 10,
  host: process.env.DB_HOST || '127.0.0.1',
  user: process.env.DB_USER || 'local_user',
  password: process.env.DB_PASSWORD || 'local_password',
  database: process.env.DB_NAME || 'local_database',
  multipleStatements: true,
  charset: 'utf8mb4' // necessary if you might need support for emoji characters
});

connection.on('connection', function (connection) {
  // handy for testing
  console.log('Pool id %d connected', connection.threadId);
});

connection.on('enqueue', function () {
  // handy for testing
  console.log('Waiting for available connection slot');
});
// make the connection global via the variable db
global.db = connection;
// everywhere else in your app, use the now global db variable when running queries
db.query('INSERT INTO users SET ?', [{ name: 'John Smith' }], function (error, results, fields) {
  if (error) throw error;
  console.log(results);
});
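To connect this back to the question, a user model under this approach needs no imports at all. Here is a minimal sketch, not tested; the file name userModel.js and the users table are just illustrations:
// ./models/userModel.js (hypothetical)
// no require needed; global.db was set in app.js
module.exports = {
  findByName: function (name, callback) {
    db.query('SELECT * FROM users WHERE name = ?', [name], callback);
  },
  create: function (user, callback) {
    db.query('INSERT INTO users SET ?', [user], callback);
  }
};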
My intention is that upon a file upload, the main Node.js Express backend process forks a child process, which calls a Java program to parse the uploaded file; the Node.js child then makes bulk (100K+) MySQL queries. I want the child to handle the queries because I do not want the parent process to "block." Since both the parent and child processes need access to the MySQL connection, I have defined it in an external file containing an npm mysql pool.
mysqlConnector.js:
// Load module
var mysql = require('mysql');

// Initialize pool
var pool = mysql.createPool({
  connectionLimit: 10,
  host: 'localhost',
  port: 3306,
  user: 'bob',
  password: 'my_pass',
  database: 'my_db'
});

const doQuery = (query_string) => {
  console.log(pool);
  pool.query(query_string, function (error, results, fields) {
    if (error) throw error;
    console.log('in doQuery: query');
  });
};

exports.doQuery = doQuery;
exports.pool = pool;
My index.js (main process) is able to access this pool and make queries with no issue. The main process forks the child, which runs the following code.
handleprops.js:
const {execSync, fork} = require('child_process');
const {doQuery} = require('../mysqlConnector');

const exec_options = {
  cwd: null,
  env: null,
  encoding: 'utf8',
  timeout: 0,
  maxBuffer: 200 * 1024,
  killSignal: 'SIGTERM'
};

process.on('message', msg => {
  if (msg.filepath && msg.username) {
    execSync(`java -jar C:\\Users\\colli\\d2reader\\out\\artifacts\\d2reader_jar\\d2reader.jar ${msg.filepath}`, exec_options);
    var filename = msg.filepath.replace(/^.*[\\\/]/, '');
    filename = __dirname + "\\json\\" + filename.substring(0, filename.length - 3) + "json";
    try {
      const fs = require('fs');
      const data = fs.readFileSync(filename, 'utf8');
      processSharedStashQueries(JSON.parse(data), msg.username);
    } catch (err) {
      console.error(err);
    }
    process.exit();
  }
});

const processSharedStashQueries = (data, username) => {
  const INSERT_USER_QUERY = `insert into sharedstashes (NumStashes, SharedGold, NumItems, UserID) values (${data.numStashes}, ${data.sharedGold}, ${data.numItems}, (select id from users where username = '${username}'))`;
  doQuery(INSERT_USER_QUERY);
};
When the child calls doQuery() inside processSharedStashQueries(), the pool object is logged and I can verify it is a valid, defined object, and the exact same pool object the parent process uses. However, when the child hits the pool.query() line of code, there is no error and no console log; it's as if nothing happens.
Why can the child process "see" the pool object but not be able to use it?
Is my overall approach incorrect? I fear I might be missing some knowledge of how to use the npm mysql module effectively for something like this. Should I put the MySQL query smarts inside the external Java program? Or is that bad design?
Thanks in advance!
I'm new to JavaScript and Node.js.
Can someone answer the following questions?
1. How do I properly split the PostgreSQL part into a separate file?
2. What is the best practice for using pg pools?
3. How can I improve this code for production?
const express = require('express');
const app = express();
const pg = require('pg');

const pool = new pg.Pool({
  user: 'admin',
  password: 'test123!',
  host: '127.0.0.1',
  port: '5432',
  database: 'test_db'
});

app.get('/api/recipes', function(req, res){
  pool.connect(function(err, client, done) {
    if(err){
      console.log('Connection failed ' + err);
      res.status(400).send(err);
    }
    client.query('SELECT * FROM recipes;', function(err, result) {
      done();
      if(err){
        console.log('Error with query! ERROR code: ' + err.code);
        res.status(400).send(err);
      }
      else{
        res.status(200).send(result.rows)
      }
    });
  });
});

app.get('/api/recipes/:id', function(req, res){
  var id = req.params.id;
  pool.connect(function(err, client, done) {
    if(err){
      console.log('Connection failed ' + err);
      res.status(400).send(err);
    }
    else{
      client.query('SELECT * FROM recipes WHERE recipes_id = $1;', [id], function(err, result) {
        done();
        if(err){
          console.log('Error with query! ERROR code: ' + err.code);
          res.status(400).send(err);
        }
        else{
          res.status(200).send(result.rows)
        }
      });
    }
  });
});

app.listen(3000, function(){
  console.log('Server listen on port 3000');
});
There are a lot of ways folks split up the code you've described. I'll take it piece by piece.
First, pull any configurable variables out and set up one file that can get them from the environment (possibly with dev defaults in place; your choice on that). You can use a library like commander or convict, but honestly I prefer to just write a simple file that pulls them myself:
// ./config.js
module.exports = {
  pool: {
    user: process.env.DB_USER || 'admin',
    password: process.env.DB_PW || 'test123!',
    host: process.env.DB_HOST || '127.0.0.1',
    port: process.env.DB_PORT || '5432',
    database: process.env.DB_NAME || 'test_db'
  }
};
As for your database calls, some folks like to use ORM-like tools such as Sequelize, but again I tend to start simple and add things as needed. In your case, think about what boilerplate you can turn into common code, and then wrap that into simple modules that expose only what the calling code really needs. For example, you'll notice that most of your routes connect to the pool, test for an error, run a query if there wasn't one, and finally render either the error or the query results, right? All of that can be wrapped into a fairly simple query function that handles the boilerplate internally and works with just a query expression and a callback, for example:
// ./db/index.js
const pg = require('pg');
const config = require('../config'); // note the path: this file lives in ./db/

const pool = new pg.Pool(config.pool);

function query(sql, params, callback) {
  // maybe check for valid inputs here or something, but at least normalize in case folks don't pass params
  if (arguments.length < 3) {
    callback = params;
    params = null;
  }
  pool.connect((err, client, done) => {
    // just exit here and let the calling code know there was a problem
    if (err) return callback(err);
    // I haven't tested this w/ the pg library recently; you might have to do two of these if it doesn't like null as a second argument
    client.query(sql, params, (err, result) => {
      done(); // release the client back to the pool whether or not the query errored
      if (err) return callback(err);
      // calling code probably doesn't care about anything but rows, but you can do other stuff here if you prefer
      return callback(null, result.rows);
    });
  });
}
// You can also add additional functions if you want shorthand for doing things like query by ID or with params, or similar
module.exports = { query };
I also think it can be helpful to store the SQL strings somewhere central, or on model objects, just so the routing code does not have to care about them. For a super simple example using your two routes, I might do something like this:
// ./db/queries.js
module.exports = {
  RECIPES: {
    LIST: 'SELECT * FROM recipes;',
    FIND_BY_ID: 'SELECT * FROM recipes WHERE recipes_id = $1;'
  }
};
OK, so now your routing code can be quite simple: you just get the db module and run the query, letting the route worry only about what it has to do with the request and response. Another option folks like is to create a module for each model in your app (e.g. a Recipe) that wraps the above two files into a set of static functions, so that your routes don't even know they're querying specifically. The calls in that case would be something like Recipe.list(cb) or Recipe.findById(id, cb). This is a style made popular by Ruby on Rails a few years ago; it has mixed acceptance in the Node community, but I'm mentioning it for completeness (see the sketch just after the routing example below).
// ./routes/recipes.js
const router = require('express').Router();
const db = require('../db'); // paths are relative to the routes folder
const queries = require('../db/queries');

router.get('/api/recipes', (req, res, next) => {
  db.query(queries.RECIPES.LIST, (err, rows) => {
    if (err) return next(err);
    return res.send(rows); // status 200 is the default here
  });
});

router.get('/api/recipes/:id', (req, res, next) => {
  const id = req.params.id;
  db.query(queries.RECIPES.FIND_BY_ID, [id], (err, rows) => {
    if (err) return next(err);
    return res.send(rows);
  });
});

module.exports = router; // so app.js can app.use() it
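For completeness, here is roughly what that model-module style could look like. This is an untested sketch; the file name and the Recipe object are my own illustration built on the db and queries modules above:
// ./models/recipe.js (hypothetical)
const db = require('../db');
const queries = require('../db/queries');

const Recipe = {
  // Recipe.list(cb) -> cb(err, rows)
  list(callback) {
    db.query(queries.RECIPES.LIST, callback);
  },
  // Recipe.findById(id, cb) -> cb(err, rows)
  findById(id, callback) {
    db.query(queries.RECIPES.FIND_BY_ID, [id], callback);
  }
};

module.exports = Recipe;
A route body then becomes Recipe.list((err, rows) => ...) and never sees SQL at all.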
Finally, in your main Express setup file:
// ./app.js
const express = require('express');
const app = express();
const recipeRoutes = require('./routes/recipes'); // note that requiring a folder imports its index.js, so that's a way to group features as well

app.use(recipeRoutes);

// I'm a big fan of error-handling middleware. There's a more complex approach I did in praeter that gives you
// http-verb-based errors you can then catch and send the appropriate status for, but that's more than you need here.
app.use((err, req, res, next) => {
  // this can be as simple or as complex as you like.
  // note it's a best practice to send only "clean" messages to the client, so you don't give away
  // that you're using a Postgres db or other stuff that makes hacking easier.
  console.error(err);
  res.status(500).send('Oops! Something went wrong!!');
});
Obviously, there are a lot of ways to skin this cat, so I'd recommend mostly just looking for where you're repeating yourself, and then refactoring to repeat less. Also, if you're interested in making more production-ready apps in general, the 12 factor app is a must-read.
To answer number 1:
dbPool.js
const pg = require('pg');

exports.pool = new pg.Pool({
  user: 'admin',
  password: 'test123!',
  host: '127.0.0.1',
  port: '5432',
  database: 'test_db'
});
app.js
const express = require('express');
const app = express();
const pool = require('./dbPool').pool;
....
You should create a config file and require that file in app.js:
--config
----config.js
--app.js
var config = {
  production: {
    pool: {
      user: 'admin',
      password: 'test123!',
      host: '127.0.0.1',
      port: '5432',
      database: 'test_db'
    }
  },
  development: {
    pool: {
      user: 'admin',
      password: 'test123!',
      host: '127.0.0.1',
      port: '5432',
      database: 'test_db'
    }
  }
};

exports.get = function get(env) {
  return config[env] || config.development;
};
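app.js can then pull the block for the current environment. A small usage sketch, assuming the config/config.js layout above and NODE_ENV as the environment switch:
// in app.js (sketch)
const pg = require('pg');
// falls back to the development block when NODE_ENV is unset
const config = require('./config/config').get(process.env.NODE_ENV);
const pool = new pg.Pool(config.pool);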
I think my connections aren't being released properly. Sometimes I get an error stating that my pool has reached its limit. Also, accessing the db sometimes randomly takes 15+ seconds. Whenever I check how many connections are in use via pool._allConnections.length, it never returns anything above 60. Here is my code:
const mysql = require('mysql');
const config = require('./config.json');

const pool = mysql.createPool({
  connectionLimit: 999,
  host: config.host,
  user: config.user,
  password: config.password,
  database: config.database
});

const db = (() => {
  const _query = (query, params, callback) => {
    pool.getConnection((err, connection) => {
      if (err) {
        callback(null, err);
      } else {
        connection.query(query, params, (err, rows) => {
          connection.release();
          if (!err) {
            callback(rows);
          } else {
            callback(null, err);
          }
        });
      }
    });
  };
  return {
    query: _query
  };
})();

module.exports = db;
I've faced the same issue, and https://github.com/mysqljs/mysql/issues/1518 helped me. Note this line:
Yeah, that was the issue. I was calling mysql.createPool on each
query.
Actually, you are importing db from query.js (let's say that's your posted code) to fire a query, and every time you fire a query it creates a new pool. To solve this, move the createPool block to app.js and share the pool globally, or create the pool once in its own module and require it from query.js (or whatever code style you prefer).
Referring to the official docs, https://github.com/mysqljs/mysql#pooling-connections, note this line:
Since the pool.query method is a short-hand for the pool.getConnection
-> connection.query -> connection.release() flow, calling pool.end() before all the queries added via pool.query have completed,
Later, I used this shorthand to avoid the headache of releasing connections myself.
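To illustrate both points, here is a minimal untested sketch: the pool is created exactly once in its own module (Node's module cache hands the same object to every require), and callers use the pool.query shorthand so getConnection/release are handled for them. The file names and the users table are placeholders:
// db.js - runs once; every require('./db') gets this same pool
const mysql = require('mysql');
module.exports = mysql.createPool({
  connectionLimit: 10,
  host: 'localhost',
  user: 'bob',
  password: 'my_pass',
  database: 'my_db'
});

// query.js - pool.query acquires and releases a connection internally
const pool = require('./db');
pool.query('SELECT * FROM users WHERE id = ?', [1], (err, rows) => {
  if (err) throw err;
  console.log(rows);
});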
When I run the server attempting to create two databases (db1 and db2), the system kicks back this error:
Possibly unhandled SequelizeBaseError: database "db2" does not exist
As a reference, there is a similar Stack Overflow question on this topic here; however, the author of that solution does not cover how the server.js file is set up. You will notice I have structured my index.js file similarly to their answer.
My models/index.js file and server run and execute scripts properly, yet the second database does not work at all and does not even get initialized.
Can someone provide a solution for the server.js file to accurately initialize two databases in this one server?
The following is the partial code from the models/index.js and server.js files. In the server.js file, I am using .sync to initialize the databases.
server.js
[...]
//sync's sequelize DB and tables
db['db1'].sequelize.sync(function(err){});
db['db2'].sequelize.sync(function(err){});
models/index.js
var databasesArray = ['db1', 'db2'];

var databasesObj = {
  database: {
    db1: {
      DBName: 'db1',
      User: user,
      Password: password,
      Config: config,
    },
    db2: {
      DBName: 'db2',
      User: user,
      Password: password,
      Config: config,
    }
  }
}; // EOF databaseObj

for (var i = 0; i < databasesArray.length; ++i) {
  var databasePointerToOBJ = databasesArray[i];
  var database = databasesObj.database[databasePointerToOBJ];

  if (database.DBName == 'db1') {
    var sq = new Sequelize(database.DBName, user, password, config);
    db['db1'] = {
      Sequelize: Sequelize,
      sequelize: sq,
      Table1: sq.import(__dirname + '/...')
    };
  } else if (database.DBName == 'db2') {
    var sq = new Sequelize(database.DBName, user, password, config);
    db['db2'] = {
      Sequelize: Sequelize,
      sequelize: sq,
      Table1: sq.import(__dirname + '/...')
    };
  }
}
module.exports = db;
--- EDIT ---
The author of the solution was correct: in order for a new database to be used, it must be created prior to being sync'd. As piotrbienias rightly notes, adding the code for creating the new DB in your initialization script would be the best option. Piotrbienias's solution is the .js option; the code that worked for me is a .sh option, as follows:
PG_HOST=localhost
PG_PORT=5432
PG_DB=databaseName
PG_USER=ubuntu
PG_PASS='EnterPassword'
sudo -u postgres createdb -U postgres -O $PG_USER $PG_DB
You need to create the database manually before trying to access it via Sequelize; it does not create the database if it does not exist. You can use the pg module inside some initialisation script to create the DB via CREATE DATABASE before doing any synchronisation and connection, or simply create it via the postgres CLI.
const pg = require('pg');

module.exports = function (next) {
  var connectionData = {
    user: 'postgres',
    password: 'password',
    host: 'localhost'
  };
  var databaseName = 'db2';
  // connect to the built-in 'postgres' database in order to create the new one
  var connectionUri = `postgres://${connectionData.user}:${connectionData.password}@${connectionData.host}/postgres`;

  pg.connect(connectionUri, function (err, client, done) {
    if (err) return next(err);
    client.query(`CREATE DATABASE ${databaseName}`, function (error) {
      // here you can perform some sequelize operations after creating the database
      client.end(); // disconnect client
      next(); // you can call it with some parameter like Sequelize instance etc.
    });
  });
};
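An initialisation script could then call this before syncing. A sketch, where the file name createDatabase.js is an assumption and the db object comes from the question's models/index.js:
// in an init script (sketch)
const createDatabase = require('./createDatabase'); // the module above

createDatabase(function (err) {
  if (err) throw err;
  // the database now exists, so it is safe to connect and sync
  db['db2'].sequelize.sync();
});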
I'm trying to build a REST-like API using MongoDB (with Mongoose) and Node.js with Restify.
I'm an absolute novice in the Mongo world, and I'm not sure where the problem is. Is it the db connection, or something else?
So, I'm doing it this way:
rest-server.js
//start server
var restify = require('restify');
var server = restify.createServer();
server.use(restify.bodyParser());

//connect db
var config = require('./Config.js');
var mongoose = require('mongoose'),
    db = mongoose.createConnection('localhost', 'travelers'),
    Schema = mongoose.Schema,
    ObjectId = mongoose.SchemaTypes.ObjectId;

db.on('error', console.error.bind(console, 'DB connection error:'));
db.once('open', function callback() {
  console.log('db connection open');
});

var LoginModel = require('./models/LoginModel.js').make(Schema, mongoose);
var LoginResource = require('./resource/LoginResource.js')(server, LoginModel);
LoginModel.js
function make(Schema, mongoose) {
  var LoginSchema = new Schema({
    //id: (?)
    username: String,
    password: String,
    traveler_id: Number,
    contact_id: Number,
    last_login: Date,
    token: String
  });
  return mongoose.model('Login', LoginSchema);
}
module.exports.make = make;
LoginResource.js
exports = module.exports = function (server, LoginModel) {
  var LoginRepository = require('../repository/LoginRepository.js');
  server.get('/login/:username/:password', function (req, res, next) {
    LoginRepository.getLogin(req, res, next, LoginModel);
  });
};
LoginRepository.js
function getLogin(req, res, next, LoginModel) {
  var query = LoginModel.find({ username: req.params.username, password: req.params.password });
  query.exec(function (err, docs) {
    console.log('got it!');
    res.send(docs);
  });
}
module.exports.getLogin = getLogin;
test query
curl localhost:8080/login/qqq/www
So I never get to res.send(docs).
Actually, I didn't add anything to the db; I just want to know that the query didn't find anything.
UPDATE:
I don't understand why, but this problem can be solved if I change the db connection code like this:
//connect db
var config = require('./Config.js');
var mongoose = require('mongoose/');
db = mongoose.connect(config.creds.mongoose_auth),
Schema = mongoose.Schema;
(use mongoose.connect and define db and Schema vars as global)
but in this case db.on() and db.once() throw an exception ("no such method").
In other words, the problem is mmm... solved, but I still don't know why.
This looks like a helpful link: server.js example on github
Models that you've created by calling mongoose.model() use Mongoose's default connection pool when executing queries. The default connection pool is created by calling mongoose.connect(), and any queries you make using your created models will be queued up until you call that and it completes.
You can also create separate connection pools (that can have their own models!) by calling db = mongoose.createConnection() like you were originally doing, but you would have to create the models for that using db.model() which is why things weren't working for you originally.
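To illustrate the two options side by side, here is a minimal sketch; the 'travelers' database comes from the question, while the URI form and the trimmed-down schema are my assumptions:
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var LoginSchema = new Schema({ username: String, password: String }); // trimmed from the question's schema

// Option 1: the default connection - models created via mongoose.model() use it
mongoose.connect('mongodb://localhost/travelers');
// note: with connect(), the events live on mongoose.connection, not on the return value,
// which is why db.on()/db.once() seemed to disappear
mongoose.connection.on('error', console.error.bind(console, 'DB connection error:'));
var LoginModel = mongoose.model('Login', LoginSchema);

// Option 2: a separate connection - models must be created from that connection
var db = mongoose.createConnection('mongodb://localhost/travelers');
db.on('error', console.error.bind(console, 'DB connection error:'));
var LoginModelB = db.model('Login', LoginSchema);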