Manage many connections from nodejs to postgres with pg - javascript

I want to execute a script that will perform a lot of queries, and I use pg to manage the connections. The problem is that when my pool is full, my program stops and doesn't queue the future queries. I tried setting the poolSize to 0, but that doesn't work either.
pg.defaults.poolSize = 100000000;
// Promise wrapper around pg.connect + client.query.
// Fix: the original fell through after reject(err) and called
// client.query on an undefined `client` whenever the pool lookup failed.
var pgQuery = function(query) {
  return new Promise(function(resolve, reject) {
    pg.connect(conString, function(err, client) { // When the pool is full, pg doesn't connect to the DB
      if (err) {
        console.error('error fetching client from pool', err);
        reject(err);
        return; // do not touch `client` — it is undefined on error
      }
      client.query(query, function(err, res) {
        if (err) {
          console.error('error running query', err);
          reject(err);
          return; // avoid calling resolve() after reject()
        }
        resolve(res);
      });
    });
  });
};
Any suggestions?

I found my problem, I wasn't releasing the clients back to the pool. For this I had to call the done() callback after my query was executed.
// Promise wrapper that retries failed pool connections up to 10 times.
// Fixes over the original:
//  - the retry call `pgQuery(query)` discarded its promise, so the outer
//    promise never settled when a retry was taken — the retry is now
//    chained via resolve();
//  - `attempt` was a fresh local (0) on every recursive call, so the retry
//    cap could never trigger; it is now threaded through as an optional
//    parameter (backward compatible — callers still pass only `query`);
//  - the error branch now returns instead of falling through to
//    client.query on an undefined `client`.
var pgQuery = function(query, attempt) {
  attempt = attempt || 0;
  return new Promise(function(resolve, reject) {
    // The done callback releases the client back to the pool.
    pg.connect(conString, function(err, client, done) {
      if (err) {
        console.error('error fetching client from pool', err);
        if (attempt < 10) {
          // Chain the retry so its outcome settles this promise too.
          resolve(pgQuery(query, attempt + 1));
        } else {
          reject(err);
        }
        return;
      }
      client.query(query, function(err, res) {
        //call `done()` to release the client back to the pool
        done();
        if (err) {
          console.error('error running query', err);
          reject(err);
          return;
        }
        resolve(res);
      });
    });
  });
};
Also my request is cpu intensive and takes a long time to execute. Because of that my request was called twice, I fixed this with
// Without this line the http request is triggered twice
req.connection.setTimeout(15*60*1000)

Related

ExpressJS MySql Pool Connection Query Return

I am just starting in ExpressJS. So here console.log works, so if I make a call, I do see it in terminal result dataset printed. But, it doesn't return it as response. I do console.log and return but still get no data in response..
index.js
...
app.get("/", async (req, res) => {
let data = await queries();
res.json(data);
});
...
Also tried
...
app.get("/", (req, res) => {
res.json(queries());
});
...
queries.js
// Question code, kept as posted to illustrate the bug the answer fixes:
// pool.getConnection() returns undefined, and the inner `return`s exit the
// CALLBACKS, not main() — so queries() resolves to nothing.
function main(sql) {
return pool.getConnection((err, connection) => {
if (err) throw err;
// NOTE: this `return` only exits the getConnection callback.
return connection.query(sql, (errt, rows) => {
connection.release(); // return the connection to pool
if (errt) throw err; // NOTE(review): throws `err` (falsy here), not `errt`
console.log(rows); // rows ARE available here — hence the terminal output
return rows; // returned into the void: nobody receives this value
});
});
}
// Returns undefined because main() does not return a Promise.
function queries() {
let Q = "SELECT * FROM `items`";
return main(Q);
}
I believe the problem is in the function main() if you do see the result in the terminal. Your functions in the index.js file seems okay. The issue is probably due to the promise not being sent back from the main() function in queries.js file. You need to do 2 things -
Wrap the functionality of the main() in a Promise and return that promise. The callback function of the Promise object takes resolve(use this to return result) and reject(use this to return error).
As the main() will return a promise, your queries() function needs to be async, and therefore needs to await the return from main().
I have attached an Image to show how I implemented this for a DNS-lookup function. You will find some similarities.
// Notice the resolve and reject are functions
// Promise-wrapped main(): resolves with the result rows for `sql`.
// Fixes over the posted answer: `throw` inside an async callback cannot be
// caught by the caller and leaves the promise pending forever — use
// reject() instead. Also `if (errt) throw err` threw the wrong variable.
function main(sql) {
  return new Promise((resolve, reject) => {
    pool.getConnection((err, connection) => {
      if (err) {
        reject(err); // settle the promise instead of throwing into the void
        return;
      }
      connection.query(sql, (errt, rows) => {
        connection.release(); // return the connection to pool
        if (errt) {
          reject(errt); // was `throw err` — wrong variable, wrong mechanism
          return;
        }
        console.log(rows);
        resolve(rows);
      });
    });
  });
}
// Runs the items query via main() and resolves with its rows.
async function queries() {
  const sql = "SELECT * FROM `items`";
  return await main(sql);
}
I believe this should work. Give it a try and good luck!

Node mysqljs. How to properly close connections

I'm using mysqljs to access MySQL with javascript.
I would like to point out that this process seems to work fine if a single piece of data.
I am feeding into my code a large set of data, to be processed line by line as a batch.
I create my connection like this:
// Single shared mysqljs connection, configured from an external
// `dataSource` object (host/user/password/database).
var connection = mysql.createConnection({
//debug: ['ComQueryPacket'],
host : dataSource.host,
user : dataSource.user,
password: dataSource.password,
database: dataSource.database
});
I have three functions that make database queries.
The function containing the SELECT query is built like this:
dbSearch(data){
var sql = "SELECT * from table where field =? and otherfield=?";
return new Promise((resolve, reject) => {
connection.query(sql, [data[0], data[1], (error, results, fields) => {
if (error){
console.log(error);
reject("Database connection error: " + error);
} else {
resolve(results);
}
});
});
}
The code executes in another function:
if (dataItem){
dbSearch(dataItem)
.then((row) => {
processingfunction(row);
});
If I leave out connection.end() the code hangs and the stream of data is held up at the first item being processed.
If I put connection.end() inside the function, i get this error:
Database connection error: Error: Cannot enqueue Query after invoking quit.
I put connection.end() as the last line of the code, everything works fine
The problem though is for the update and insert functions:
updateRecord(data){
var sql = "UPDATE table set field=? where id=?";
return new Promise((resolve, reject) => {
connection.query(sql, [data[0], data[1], (error, results, fields) => {
if (error){
console.log(error);
reject("Database connection error: " + error);
} else {
resolve(results);
}
});
});
}
inputRecord(data){
var sql = "INSERT INTO table (field1, field2, field3) VALUES(?,?,?)";
return new Promise((resolve, reject) => {
connection.query(sql, [data[0], data[1], data[2]], (error, results, fields) => {
if (error){
console.log(error);
reject("Database connection error: " + error);
} else {
resolve(results);
}
});
});
}
With connection.end() in the function I get this error.
Database connection error: Error: Cannot enqueue Query after invoking quit.
(node:40700) UnhandledPromiseRejectionWarning: Database connection error: Error: Cannot enqueue Query after invoking quit.
(node:40700) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 1)
(node:40700) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
Based on the documentation, I have no clarity on how to properly handle closing the connection so that the code can process properly.
Not sure what I am doing wrong. Could use some mentoring from someone experienced with using connections to process chunks of data, and with how to properly handle closing the connections.
NOTE:
A similar problem happens when I try connection pooling, so that was not a workable solution.
If you use a connection pool, you don't have to close the connection manually. There is a convenient .query() method. Since you're already using .query(), don't close it at the end of the function.
This is a shortcut for the pool.getConnection() -> connection.query() -> connection.release() code flow.
var mysql = require('mysql');
var pool = mysql.createPool({
connectionLimit : 10,
host : 'example.org',
user : 'bob',
password : 'secret',
database : 'my_db'
});
pool.query('SELECT 1 + 1 AS solution', function (error, results, fields) {
if (error) throw error;
console.log('The solution is: ', results[0].solution);
});
You are using an implicit connection. Meaning, you are attempting to query the database without explicitly connecting first. This means that each time you run this line:
reject("Database connection error: " + error);
You can't be certain that it was a connection error. It may have been a query error. I believe it is always best to explicitly create/destroy your connections.
inputRecord(data){
var sql = "INSERT INTO table (field1, field2, field3) VALUES(?,?,?)";
return new Promise((resolve, reject) => {
connection.connect(function(err){
if(err) reject("Database connection error: " + error);
connection.query(sql, [data[0], data[1], data[2]], (error, results, fields) => {
connection.end(); // Now you are certain a connection was made, and can be terminated
if (error){
console.log(error);
reject("Database connection error: " + error);
} else {
resolve(results);
}
});
});
});
}
Because this is a pattern that will be used over and over, I would suggest moving it to its own function:
executeQuery(query, params){
return new Promise((resolve, reject) => {
connection.connect(function(err){
if(err) reject("Database connection error: " + error); // Connection error with certainty
connection.query(query, params, (error, results, fields) => {
connection.end(); // Now you are certain a connection was made, and can be terminated
if (error){
console.log(error);
reject("Database query error: " + error); // Query error
} else {
resolve(results);
}
});
});
});
}
And then simply call the function:
inputRecord(data){
var sql = "INSERT INTO table (field1, field2, field3) VALUES(?,?,?)";
return executeQuery(sql,[data[0],data[1],data[2]]);
}
updateRecord(data){
var sql = "UPDATE table set field=? where id=?";
return executeQuery(sql,[data[0],data[1]]);
}
dbSearch(data){
var sql = "SELECT * from table where field =? and otherfield=?";
return executeQuery(sql,[data[0],data[1]]);
}
To use connection pooling, executeQuery would become:
executeQuery(query, params){
return new Promise((resolve, reject) => {
pool.query(query, params,function(err,res){ // shortcut for connect, query,end - no need to terminate the connection
if(err) reject(err);
else resolve(res);
});
});
}
This should solve your problem, but if not, it should at least help narrow down the possibilities by breaking out the error conditions and bringing all query related code into the same function.
Checking for connection first
getConnection(){
return new Promise((resolve, reject) => {
if(connection.state === 'connected') resolve(connection);
else {
connection.connect(function(err) => {
if(err) reject ("Connection error: " + error);
else resolve(connection);
});
}
});
}
executeQuery(query, params){
return new Promise((resolve, reject) => {
getConnection().then(connection =>{
connection.query(query, params, (error, results, fields) => {
connection.end(); // Now you are certain a connection was made, and can be terminated
if (error){
console.log(error);
reject("Database query error: " + error); // Query error
} else {
resolve(results);
}
});
});
});
}

NodeJs: Run Mysql queries synchronously

I'm using NodeJS and mysql2 to store data in my database. but their are times when I need to perform database saves synchronously, like this example:
// Question code (race illustrated): the second INSERT below does NOT wait
// for the first one's callback, so rent.client.id may still be 0 when the
// rents row is written.
if(rent.client.id === 0){
//Save client
connection.query('INSERT INTO clients (name, identity, licence, birthDate, address, phoneNumber, email) VALUES (?,?,?,?,?,?,?)',
[/*Array of values*/],
function (err, results) {
if (err) throw err;
//Retrieve client id to use it in the next database save
rent.client.id = results.insertId;
})
}
//Save rent
// BUG (the point of the question): this runs before the callback above has
// assigned rent.client.id.
connection.query('INSERT INTO rents (deliveryDate, returnDate, gasLevel, deliveryPoint, deliveryPrice, unitPrice, state, clientID, carID) VALUES (?,?,?,?,?,?,?,?,?)',
[/*Array of values that contain the last inserted id clients*/],
function (err, results) {
if (err) throw err;
console.log('rent saved', results);
})
So how can I perform these two database saves synchronously. I don't think that doing it in the following manner is good code:
// Nesting the second query inside the first callback DOES order the two
// saves correctly; the asker's concern is only that this shape reads badly.
connection.query(queryString,
[queryValues],
function (err, results) {
if (err) throw err; // NOTE: throwing in a callback is not catchable by the caller
connection.query(queryString,
[queryValues],
function (err, results) {
if (err) throw err;
console.log('rent saved', results);
})
})
So what kind of solutions do you propose?
I don't think that doing it in the following manner is good code
It isn't, but only because of the
if (err) throw err;
part, which will not do anything useful. (It certainly won't make your function making these query calls throw an exception; it can't, you function has already returned. All it does is throw an exception from the callback; query probably ignores it.)
Other than that, it's the correct way to do this with NodeJS-style callbacks. More specifically:
// Runs the two dependent INSERTs in order, reporting the outcome through a
// NodeJS-style callback. Fix: the original never invoked `callback` on
// success, so callers could only ever observe failures.
function myFunctionToDoThisWork(callback) {
  connection.query(queryString1,
    [queryValues1],
    function (err, results) {
      if (err) {
        callback(err);
        return;
      }
      connection.query(queryString2,
        [queryValues2],
        function (err, results) {
          if (err) {
            callback(err);
            return;
          }
          console.log('rent saved', results);
          callback(null, results); // signal success — missing in the original
        });
    });
}
There are couple of things you can do to make that code easier to maintain:
One is to use promises, which you can use on any vaguely-recent version of Node (or via an npm module). First we'd give ourselves a Promise-enabled version of query. In Node v8 and above, you can do that like this:
// Fix: the Node built-in module is "util", not "utils" — require("utils")
// throws MODULE_NOT_FOUND unless an unrelated npm package happens to be
// installed.
const promisify = require("util").promisify;
// ...
// Bind so connection.query keeps its `this` when called through the wrapper.
const queryPromise = promisify(connection.query.bind(connection));
Alternatively, there's the promisify npm module — or this basic version, which is trivial:
// Minimal hand-rolled promisify: wraps a NodeJS-callback-style function so
// it returns a Promise instead.
// Fix: the original read `arguments` INSIDE the Promise executor, where it
// refers to the executor's own (resolve, reject) arguments — the caller's
// actual arguments were lost. They are now captured in the outer wrapper
// before the executor runs.
function promisify(f) {
  return function() {
    var t = this;
    var args = Array.prototype.slice.call(arguments); // caller's args, captured here
    return new Promise(function(resolve, reject) {
      // Append the NodeJS-style callback that settles the promise.
      args.push(function(err, data) {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      });
      f.apply(t, args);
    });
  };
}
Then:
// Promise-chain version: runs the two queries in order and resolves with
// the second query's results.
// Fixes: the original had a stray `});` (SyntaxError) and logged an
// undeclared `results` — the second query's results are now received as
// the .then() parameter and returned to the caller.
function myFunctionToDoThisWork() {
  return queryPromise(queryString1, [queryValues1])
    .then(() => queryPromise(queryString2, [queryValues2]))
    .then((results) => {
      console.log('rent saved', results);
      return results; // resolve the chain with the saved rows
    });
}
then consume it via:
myFunctionToDoThisWork().then(/*...*/).catch(/*...*/);
On Node v8 and higher, you can take that further with async and await:
// async/await version of the two ordered saves.
// Fix: `results` was never declared in the original — capture the second
// query's resolution so the log (and return value) are meaningful.
async function myFunctionToDoThisWork() {
  await queryPromise(queryString1, [queryValues1]);
  const results = await queryPromise(queryString2, [queryValues2]);
  console.log('rent saved', results);
  return results;
}
If you call it from an async function, you'd consume it via await. If calling it from a non-async function, you consume it just like the promise version above (via then).

Handle Success/Error Responses from ssh2 using Promises

I'm building a node.js app which in production will act as a SSH client to many servers, some of which may be inaccessible at any given time. I'm trying to write a function which attempts to run a SSH command with each client in its config upon startup, and I'm not able to handle both successful sessions and those which end in error. I wrapped a ssh2 client in a promise. If I remove the third (trash) server and only successes result, this works fine! See the output:
STDOUT: Hello World
STDOUT: Hello World
Session closed
Session closed
Successful session: Hello World,Hello World
But if one of the connections times out, even though I handle the error, I don't get to keep any of my data. It looks like the error message overwrites all of the resolved promises
Successful session: Error: Timed out while waiting for handshake,Error:
Timed out while waiting for handshake,Error: Timed out while waiting
for handshake
Here's my code, forgive me if this is a bit scattered, as I've combined a few of my modules for the sake of this question. My goal is to keep the data from the successful session and gracefully handle the failure.
var Client = require('ssh2').Client;
// Lab name → IP map; per the question text, "ny3" (1.1.1.1) is the
// unreachable host used to exercise the failure path.
const labs = {
"ny1": "192.168.1.2",
"ny2": "192.168.1.3",
"ny3": "1.1.1.1"
};
// Fires the test command at every lab in parallel and reports the results.
// Fixes: the loop index `i` was an implicit global (breaks under
// 'use strict' and can collide with other code), and Object.keys(labs)
// was recomputed on every iteration.
function checkLabs() {
  let promises = [];
  for (const labName of Object.keys(labs)) {
    promises.push(asyncSSH("echo 'Hello World'", labs[labName]));
  }
  // Per-promise .catch converts failures into values so one bad host
  // cannot reject the whole Promise.all.
  Promise.all(promises.map(p => p.catch(e => e)))
    .then(results => console.log("Successful session: " + results))
    .catch(e => console.log("Error! " + e));
}
// Connects to `dest` over SSH, runs `command`, and reports via callback.
// NOTE(review): `callback` can fire more than once here (once per stdout
// 'data' event, plus on stderr/error) — tolerable only because the Promise
// wrapper in asyncSSH() ignores settles after the first.
var sendSSH = function (command, dest, callback) {
var conn = new Client();
conn.on('ready', function() {
return conn.exec(command, function(err, stream) {
if (err) throw err; // NOTE(review): thrown inside an event handler — not catchable by the caller
stream.on('data', function(data) {
callback(null, data); // success path: the first chunk settles the wrapper promise
console.log('STDOUT: ' + data);
}).stderr.on('data', function(data){
callback(err); // NOTE(review): `err` is falsy here (checked above) — this reports no error detail
console.log('STDERR: ' + data);
}).on('close', function(err) {
if(err) {
console.log('Session closed due to error');
} else {
console.log('Session closed');
}
});
stream.end('ls -l\nexit\n');
});
}).on('error', function(err){
callback(err); // connection-level failure (e.g. handshake timeout)
}).connect({
host: dest,
port: 22,
username: 'root',
readyTimeout: 10000,
privateKey: require('fs').readFileSync('link-to-my-key')
});
};
// Promise adapter over the callback-based sendSSH().
function asyncSSH(command, dest) {
  return new Promise((resolve, reject) => {
    sendSSH(command, dest, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data);
      }
    });
  });
}
checkLabs(); // Fix: was `checklabs()` — ReferenceError, the function is named checkLabs
How can I better use this promise wrapper to handle whatever errors come from the ssh2 client? Any tips are appreciated.
To get best use from each connection, you can (and arguably should) promisify separately :
the instatiation of each Client() instance
each instance's conn.exec() method (and any other asynchronous methods as required)
This will allow each instance of Client() to be reused with different commands (though not required in this example).
You should also be sure to disconnect each socket when its job is done, by calling client_.end(). For this, a "disposer pattern" is recommended.
With those points in mind and with a few assumptions, here's what I ended up with :
var Client = require('ssh2').Client;
// Same lab map as the question; "ny3" is the unreachable host used to
// exercise the failure path.
const labs = {
"ny1": "192.168.1.2",
"ny2": "192.168.1.3",
"ny3": "1.1.1.1"
};
// Runs the probe command against every lab, collecting per-host outcomes.
function checkLabs() {
  const probe = (conn) =>
    conn.execAsync("echo 'Hello World'")
      // Immunise the whole batch against any single failure by converting
      // the rejection into an error-message string on the success path.
      .catch((e) => "Error: " + e.message);
  const promises = Object.keys(labs).map((key) => withConn(labs[key], probe));
  Promise.all(promises)
    .then((results) => console.log("Successful session: " + results))
    // Individual errors are caught above, so this should be unreachable.
    .catch((e) => console.log("Error! " + e.message));
}
// disposer pattern, based on https://stackoverflow.com/a/28915678/3478010
function withConn(dest, work) {
var conn_;
return getConnection(dest).then((conn) => {
conn_ = conn;
return work(conn);
}).then(() => {
if(conn_) {
conn_.end(); // on success, disconnect the socket (ie dispose of conn_).
}
}, () => {
if(conn_) {
conn_.end(); // on error, disconnect the socket (ie dispose of conn_).
}
});
// Note: with Bluebird promises, simplify .then(fn,fn) to .finally(fn).
}
// Opens an SSH connection to `dest`, resolving with a promisified Client
// once the handshake completes; rejects on any connection error.
function getConnection(dest) {
  return new Promise((resolve, reject) => {
    const conn = promisifyConnection(new Client());
    conn
      .on('ready', function() {
        resolve(conn);
      })
      .on('error', reject)
      .connect({
        host: dest,
        port: 22,
        username: 'root',
        readyTimeout: 10000,
        privateKey: require('fs').readFileSync('link-to-my-key')
      });
  });
}
// Attaches a promisified execAsync() method to an ssh2 Client instance.
// execAsync(command) resolves with the accumulated stdout once the stream
// closes, and rejects on exec error, close-with-error, or stderr output.
// (A promise settles only once, so later reject/resolve calls are no-ops.)
function promisifyConnection(conn) {
conn.execAsync = (command) => { // promisify conn.exec()
return new Promise((resolve, reject) => {
conn.exec(command, (err, stream) => {
if(err) {
reject(err);
} else {
let streamSegments = []; // array in which to accumulate streamed data
stream.on('close', (err) => {
if(err) {
reject(err);
} else {
resolve(streamSegments.join('')); // or whatever is necessary to combine the accumulated stream segments
}
}).on('data', (data) => {
streamSegments.push(data); // NOTE(review): chunks are likely Buffers; join('') stringifies them — confirm encoding
}).stderr.on('data', function(data) {
reject(new Error(data)); // assuming `data` to be String
});
stream.end('ls -l\nexit\n'); // not sure what this does?
}
});
});
};
// ... promisify any further Client methods here ...
return conn;
}
NOTES:
the promisification of conn.exec() includes an assumption that data may be received in a series of segments (eg packets). If this assumption is not valid, then the need for the streamSegments array disappears.
getConnection() and promisifyConnection() could be written as one function but with separate function it's easier to see what's going on.
getConnection() and promisifyConnection() keep all the messy stuff well away from the application code.

How to use callback function inside a function?

I want to call a callback function inside another function, but I don't know how to do that.
// Question code: inserts one row using a pooled pg client, releasing the
// client via done().
// NOTE(review): '$n' is not a valid pg placeholder ($1, $2, ... are) and
// no values array is passed, so the query will fail as written; also an
// INSERT without RETURNING yields no rows, so result.rows[0] would be
// undefined even if it succeeded.
function call(){
pg.connect(conString, function(err, client, done) {
if(err) {
return console.error('error fetching client from pool', err);
}
client.query('INSERT into post1 (data) VALUES ($n)', function(err, result) {
//call `done()` to release the client back to the pool
done();
if(err) {
return console.error('error running query', err);
}
console.log(result.rows[0].number);
//output: 1
});
});
}
// Johnny-Five board setup: reads an analog sensor and reacts to changes.
board.on("ready", function() {
// Create a new generic sensor instance for
// a sensor connected to an analog (ADC) pin
var sensor = new five.Sensor("A0");
// When the sensor value changes, log the value
sensor.on("change", function() {
var n = this.value(); // NOTE(review): in Johnny-Five, Sensor#value is a property — confirm whether calling it as a function works
//i want to call that function here
});
});
I also want to call this function from another callback function. Is this the correct way to do it, or can you suggest the right one?
You could do something like this, where you are passing a function into your function. So callback would be a function in this case.
// Runs the query, releases the pooled client, then reports the outcome
// through `callback`.
// Fixes over the posted answer: callback was invoked BEFORE the error was
// inspected (so it fired even on failed queries, with no arguments), and
// the connect-error path never invoked it at all. The callback now
// receives NodeJS-style (err, result) — backward compatible with callers
// that ignore the arguments.
function call(callback){
  pg.connect(conString, function(err, client, done) {
    if (err) {
      console.error('error fetching client from pool', err);
      return callback(err);
    }
    client.query('SELECT $1::int AS number', ['1'], function(err, result) {
      //call `done()` to release the client back to the pool
      done();
      if (err) {
        console.error('error running query', err);
        return callback(err);
      }
      console.log(result.rows[0].number);
      //output: 1
      callback(null, result);
    });
  });
}
then you could call it like
call(function(){
//some logic here.
})
or:
var someFunction = function()
{
//do something
}
call(someFunction);

Categories

Resources