I am trying to create some unit tests with Mocha and Chai, but I am unable to connect to the MySQL server. Here is my unit test:
process.env.NODE_ENV = 'test';

let chai = require('chai');
let chaiHttp = require('chai-http');
let server = require('../app');
let should = chai.should();

chai.use(chaiHttp);

describe('API endpoint /authenticate', function () {
    before((done) => {
        /* Todo: Clean up Database */
        done()
    });

    it('should authenticate a user given CORRECT username/password', (done) => {
        let user = {
            username: 'ax850',
            password: 'test'
        };
        chai.request(server)
            .post('/api/authenticate')
            .send(user)
            .end((err, res) => {
                res.should.have.status(200);
                done();
            });
        done();
    });
});
As you can see, I am doing a POST request. In the script where I handle this POST request, I connect to a MySQL server like so:
const db = require('./../../db_conn');
And in the db_conn I have:
const mysql = require('mysql');
const config = require('config.json')('./config.json');

let db;
const db_config = config.a_c.dbConfig;

function connectDatabase() {
    if (!db) {
        db = mysql.createConnection({
            host: db_config.host,
            user: db_config.user,
            port: db_config.port,
            database: process.env.NODE_ENV === 'test' ? db_config.db_test : db_config.db_dev,
            //database: db_config.db_dev,
            password: db_config.password
        });

        db.connect(function(err){
            if(!err) {
                console.log('Database is connected!');
            } else {
                console.log('Error connecting database!');
            }
        });
    }
    return db;
}

module.exports = connectDatabase();
However, when I run my test command, mocha --timeout 10000, it doesn't connect to the database. mysql.createConnection runs, but the connection is never established and no error is logged either. Any ideas? This works when I run the server outside of testing.
You are calling done before the request finishes.
chai.request(server)
    .post('/api/authenticate')
    .send(user)
    .end((err, res) => {
        res.should.have.status(200);
        done();
    });

// this is executed before chai.request is fulfilled
done();
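Dropping the trailing done() fixes it: the test then only completes once the response has come back and the assertion has run. A minimal sketch of the corrected test case, reusing the code from the question:

it('should authenticate a user given CORRECT username/password', (done) => {
    let user = {
        username: 'ax850',
        password: 'test'
    };
    chai.request(server)
        .post('/api/authenticate')
        .send(user)
        .end((err, res) => {
            res.should.have.status(200);
            done(); // signal completion only after the assertion has run
        });
});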
Related
I need to create a MySQL connection in Node.js, but the credentials for MySQL come from a third-party credential manager service. Can somebody suggest a way to achieve this?
database.js - I am using the connection from this file in all other database operations
const mysql = require("mysql");
const {env} = require('./globals')
const connection = mysql.createConnection({
host: env.DATABASE.HOST,
user: env.DATABASE.USER,
password: env.DATABASE.PASSWORD,
database: env.DATABASE.NAME,
multipleStatements: true
});
connection.connect(function (err) {
if (err) {
console.log("Error in DB connection");
console.log("err", err);
} else console.log("Connected!");
});
module.exports = connection
globals.js
const {getSecret} = require('../src/service')
require("dotenv").config();

async function getCredentials() {
    const result = await getSecret()
    return JSON.parse(result?.SecretString || {})
}

const credentials = getCredentials() // not working, and i can't use await here

const env = {
    DATABASE: {
        HOST: credentials.ip_address,
        PASSWORD: credentials.password,
        NAME: credentials.dbname,
        USER: credentials.username,
    },
    SKU: process.env.SKU
}

module.exports.env = env
Your 2 main options are:
Don't export connection but export an async function that returns a connection.
Write an init() function that sets up your database connection, and ensure it's one of the first things your application calls (a rough sketch of this pattern follows below).
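For the second option, the init()-style module might look roughly like this. This is only a sketch: it assumes the async getCredentials() helper described below and reuses the connection fields from the question.

// db.js - nothing can use the connection until init() has run
const mysql = require("mysql");
const { getCredentials } = require("./globals");

let connection;

async function init() {
    const env = await getCredentials();
    connection = mysql.createConnection({
        host: env.DATABASE.HOST,
        user: env.DATABASE.USER,
        password: env.DATABASE.PASSWORD,
        database: env.DATABASE.NAME,
        multipleStatements: true
    });
}

// callers use this only after init() has completed
function getConnection() {
    if (!connection) throw new Error("init() has not been called yet");
    return connection;
}

module.exports = { init, getConnection };

At startup you would then await init() before wiring up anything that queries the database.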
Well first, you need to fix up that globals.js file. Logic that depends on an async function must be async itself. You can just move everything into the async function like so:
const {getSecret} = require('../src/service')
require("dotenv").config();

async function getCredentials() {
    const result = await getSecret()
    // fall back to an empty JSON string so JSON.parse doesn't choke when the secret is missing
    const credentials = JSON.parse(result?.SecretString || '{}')
    return {
        DATABASE: {
            HOST: credentials.ip_address,
            PASSWORD: credentials.password,
            NAME: credentials.dbname,
            USER: credentials.username,
        },
        SKU: process.env.SKU
    }
}

module.exports = { getCredentials }
And since even your database connection in database.js depends on this async function, it will have to be async as well:
const mysql = require("mysql");
const {getCredentials} = require('./globals')

const getConnection = getCredentials().then(function (env) {
    const connection = mysql.createConnection({
        host: env.DATABASE.HOST,
        user: env.DATABASE.USER,
        password: env.DATABASE.PASSWORD,
        database: env.DATABASE.NAME,
        multipleStatements: true
    });

    connection.connect(function (err) {
        if (err) {
            console.log("Error in DB connection");
            console.log("err", err);
        } else console.log("Connected!");
    });

    return connection;
})

module.exports = getConnection
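Because the module now exports a promise rather than a ready connection, consumers have to wait for it before querying. A hedged sketch of what that usage might look like (the SELECT 1 query is purely illustrative, not from the original post):

const getConnection = require('./database');

async function runQuery() {
    const connection = await getConnection; // resolves once the credentials have been fetched
    connection.query('SELECT 1', function (err, results) {
        if (err) return console.error(err);
        console.log(results);
    });
}

runQuery();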
I have Redis up and running on port 6379, and connecting via telnet works fine.
I'm trying to connect to it from Node.js, but I'm getting no response from the event listeners.
If I call any function like client.set(), I get: "ClientClosedError: The client is closed".
This is the code I'm running:
const redis = require('redis');
const client = redis.createClient(6379);

client.on('connect', () => {
    console.log('connected');
});

client.on('end', () => {
    console.log('disconnected');
});

client.on('reconnecting', () => {
    console.log('reconnecting');
});

client.on('error', (err) => {
    console.log('error', { err });
});

setTimeout(() => { console.log("goodbye") }, 20*1000 );
Nothing happens for 20 seconds, and then it closes.
Starting from v4 of the node-redis library, you need to call client.connect() after initializing a client. See this migration guide.
const redis = require('redis');
const client = redis.createClient({ socket: { port: 6379 } });

client.connect();

client.on('connect', () => {
    console.log('connected');
});
You might also want to consider awaiting client.connect() inside an async function, so you don't have to rely on event listeners.
const redis = require('redis');

(async () => {
    try {
        const client = redis.createClient({ socket: { port: 6379 } });
        await client.connect();
        console.log('connected');
    } catch (err) {
        console.error(err)
    }
})()
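Once connect() has resolved, commands like the client.set() call from the question no longer throw ClientClosedError, since in node-redis v4 each command returns a promise. A small sketch (the key and value here are made up for illustration):

const redis = require('redis');

(async () => {
    const client = redis.createClient({ socket: { port: 6379 } });
    await client.connect();

    // commands can simply be awaited once the client is connected
    await client.set('greeting', 'hello');
    const value = await client.get('greeting');
    console.log(value); // "hello"

    await client.quit();
})();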
I am trying to connect my application to the database using the connection pool method. It connects fine, and data insertion works without any issues, but the other queries in the same file are slowing down.
I have also tried the release() method, but it is not working properly.
How can I release the client back to the pool for the next query once the current query has finished?
Below is my dbpool.js file, where I set up a common, generalized database connection:
var pg = require('pg');

var PGUSER = 'postgres';
var PGDATABASE = 'test_database';

var config = {
    user: PGUSER, // name of the user account
    host: 'localhost',
    database: PGDATABASE, // name of the database
    password: 'password#AWS',
    port: 5432,
    max: 10,
    idleTimeoutMillis: 10000
};

const pool = new pg.Pool(config);

const DB = {
    query: function(query, callback) {
        pool.connect((err, client, done) => {
            if(err){ return callback(err); }
            client.query(query, (err, results) => {
                // done();
                client.release();
                // if(err) { console.error("ERROR: ", err) }
                if(err) { return callback(err); }
                callback(null, results.rows);
            })
        });
    }
};

module.exports = DB;
I tried both the done() and client.release() methods, but no luck. If I use both, I get an error saying the client has already been released.
Below is my socket.js file code:
var express = require('express');
const connection = require('./dbpool.js');

if(arData == '0022'){
    const queryText = "INSERT INTO alert(alert_data) VALUES('"+arData+"')";
    connection.query(queryText, (err, res) => {
        if(err){
            console.log(err.stack);
        }
    });
}

if(arData == '0011'){
    const queryText = "INSERT INTO table2(alert_data) VALUES('"+arData+"')";
    connection.query(queryText, (err, res) => {
        if(err){
            console.log(err.stack);
        }
    });
}

function ReverseCommunication(){
    const select1 = "SELECT * FROM alert WHERE action = '0' ORDER BY alert_id ASC LIMIT 1";
    connection.query(select1, (err, res) => {
        if(err) {
            console.log("Error1");
            res.json({"error":true});
        }
        else{
            console.log("res==", res);
        }
    });
}

setInterval(function(){
    ReverseCommunication();
}, 2000)
With a pool you shouldn't need to close the connection yourself. The pool reuses its connections for subsequent requests, so you don't have to connect to the DB each time.
(I'm not a PG expert here; I'm sure others could expand on this far better than I can.)
What works for us is to set up the dbpool file you have like this:
const {Pool, Client} = require('pg');

const pool = new Pool({
    user: process.env.POSTGRES_USER,
    host: process.env.POSTGRES_URL,
    database: process.env.POSTGRES_DATABASE,
    password: process.env.POSTGRES_PASSWORD,
    port: process.env.POSTGRES_PORT,
    keepAlive: true,
    connectionTimeoutMillis: 10000, // 10 seconds
    max: 10
});

pool.connect()
    .then(() => console.log('pg connected'))
    .catch(err => console.error(err))

module.exports = pool
Then use pool.query as you do now, together with the pool.connect setup shown above.
Also, just a side note: which library are you using for PG? I noticed your queries are built by string concatenation; you may want to switch to parameterized queries to prevent possible SQL injection, as sketched below.
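For example, the insert from the question could be written with a parameter placeholder instead of concatenating arData into the SQL string. This is a sketch using the alert table from the question, and it assumes dbpool.js exports the pool directly, as in the snippet above:

const pool = require('./dbpool.js');

// arData is passed as a bound value ($1), never spliced into the SQL text
const queryText = 'INSERT INTO alert(alert_data) VALUES($1)';
pool.query(queryText, [arData], (err, res) => {
    if (err) {
        console.log(err.stack);
    }
});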
I'm very new to testing and I can't figure out how to test this scenario. I use supertest and Jest for testing. In my Express app I use JWT authentication. I store a JWT token that identifies a user in an http-only cookie, and it's created when a user creates an account or logs in.
Now, I have a route that is responsible for a password change requested by a user:
router.post('/reset-password', async function (req, res, next) {
    try {
        //relevant to the question part here
        //
        const { userToken } = req.cookies
        if (!userToken) {
            res.status(401).json({ error: 'unauthorized' })
            return
        }
        const { email } = jwt.verify(userToken, process.env.JWT_SECRET)
        /////////////////////
    } catch (err) {
        res.status(401).json({ error: 'unable to verify' })
    }
})
As I understand it, in order to test this route I need to set a cookie in beforeAll. I tried doing so by registering a user, which normally sets the required token. Here is my test code:
const request = require('supertest')
const app = require('../app')
const MongoClient = require('mongodb').MongoClient

const client = new MongoClient(`mongodb://localhost:27017/img-test`, {
    useNewUrlParser: true,
    useUnifiedTopology: true,
})

beforeAll(async () => {
    await client.connect()
    app.locals.db = client.db('img-test')
    await request(app).post('/register').send({
        username: 'Bob',
        email: 'bob#email.com',
        password: 'Hardpass0!',
        checkboxTerms: 'on',
        'g-recaptcha-response': 'asdasd',
    })
})

describe('Password reset', () => {
    test('Should reset password', async () => {
        await request(app)
            .post('/api/account/reset-password')
            .send({
                passwordCurrent: 'Hardpass0!',
                passwordNew: 'Hardpass1!',
            })
            .expect(200)
    })
})

afterAll(async () => {
    let db = app.locals.db
    await db.dropDatabase()
    client.close()
})
And of course, it fails. userToken is going to be undefined because, as I understand it, supertest is not setting the http-only cookie when a user is registered, so there is no token on req.cookies. How do I test this scenario? What am I doing wrong? Thanks for the help.
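One detail that may help here (a sketch, not part of the original post): supertest exposes whatever Set-Cookie header the registration response sends back, and that cookie can be forwarded explicitly on the follow-up request with .set('Cookie', ...). Reusing the test code above, it could look roughly like this:

let cookies

beforeAll(async () => {
    await client.connect()
    app.locals.db = client.db('img-test')
    const res = await request(app).post('/register').send({
        username: 'Bob',
        email: 'bob#email.com',
        password: 'Hardpass0!',
        checkboxTerms: 'on',
        'g-recaptcha-response': 'asdasd',
    })
    // keep only the name=value pairs from the Set-Cookie header(s), including userToken
    cookies = (res.headers['set-cookie'] || []).map(c => c.split(';')[0]).join('; ')
})

describe('Password reset', () => {
    test('Should reset password', async () => {
        await request(app)
            .post('/api/account/reset-password')
            .set('Cookie', cookies) // forward the captured cookie so req.cookies.userToken exists
            .send({
                passwordCurrent: 'Hardpass0!',
                passwordNew: 'Hardpass1!',
            })
            .expect(200)
    })
})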
I am trying to test socket.io using Mocha, but I have a problem when I register a socket handler with on. I think the socket is not connected, because the console.log inside the handler is never triggered, while the console.log outside the socket call does show. Or is something wrong with how I use the done() method?
const { assert } = require('chai');
const ioServer = require('socket.io');
const ioClient = require('socket.io-client');
const http = require('http');

const socketUrl = 'http://localhost:5000';
const options = {
    transports: ['websocket'],
    'force new connection': true,
};

let server;
let client;
let httpServer;

describe('Socket', () => {
    beforeEach(done => {
        httpServer = http.createServer().listen();
        server = ioServer(httpServer);
        client = ioClient.connect(socketUrl, options);

        server.on('connection', socket => {
            console.log('connected server');
            socket.emit('echo', 'Hello World');
        });

        client.on('echo', message => {
            console.log('connected client');
            console.log(message);
        });

        console.log('test');
        done();
    });

    afterEach(done => {
        server.close();
        client.close();
        done();
    });

    it('It should connect socket', done => {
        client.on('echo', message => {
            assert.equal(message, 'Hello World');
        });
        done();
    });
});
Here's a screenshot of the CLI terminal when running Mocha.
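For what it's worth, note that the client in the code above connects to http://localhost:5000 while httpServer.listen() is called without a port, so the test server binds to a random ephemeral port. A rough sketch of one way to line those up and only call done() once the client has really connected (this restructuring is an assumption about the intent, reusing the variable names from the question):

let serverSocket;

describe('Socket', () => {
    beforeEach(done => {
        httpServer = http.createServer();
        server = ioServer(httpServer);

        server.on('connection', socket => {
            serverSocket = socket;
        });

        // bind to an ephemeral port, then point the client at that same port
        httpServer.listen(() => {
            const port = httpServer.address().port;
            client = ioClient.connect(`http://localhost:${port}`, options);
            client.on('connect', () => done());
        });
    });

    afterEach(done => {
        server.close();
        client.close();
        done();
    });

    it('should receive the echo message', done => {
        client.on('echo', message => {
            assert.equal(message, 'Hello World');
            done(); // finish only after the message has actually arrived
        });
        serverSocket.emit('echo', 'Hello World');
    });
});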