Cannot close PostgreSQL connection within Node.js script - javascript

I am writing a small Node.js server to help maintain build machines. It's basically for testers to be able to drop a database or restart a server remotely. I have some issues with pg connections. Does anybody have an idea why the connection is not being closed after the first request?
var client = new pg.Client(conString);
var server = http.createServer(function (req, res) {
    var url = parse(req.url);
    if (url.pathname == '/') {
        (...)
    } else {
        var slash_index = url.pathname.indexOf('/', 1);
        var command = url.pathname.slice(1, slash_index);
        if (command == 'restart') {
            res.write('restarting server please wait');
        } else if (command == 'drop-db') {
            console.log('drop-db');
            client.connect();
            console.log('connect');
            var query = client.query("select datname from pg_database;", function(err, result) {
                if (err) throw err;
                console.log('callback');
            });
            query.on('end', function() {
                console.log('close');
                client.end();
            });
        } else {
            res.write('unknown command : ' + command);
        }
        res.write('\n');
        res.end();
    }
}).listen(5337);
So what I get on screen after the first request is:
drop-db
connect
callback
close
Great, but after the next request I get only:
drop-db
connect
After the next one I already get a pg error.
What am I doing wrong?
Edit: No errors after the second request. Error after the third:
events.js:48
throw arguments[1]; // Unhandled 'error' event
^
error: invalid frontend message type 0
at [object Object].<anonymous> (/home/wonglik/workspace/server.js/node_modules/pg/lib/connection.js:412:11)
at [object Object].parseMessage (/home/wonglik/workspace/server.js/node_modules/pg/lib/connection.js:287:17)
at Socket.<anonymous> (/home/wonglik/workspace/server.js/node_modules/pg/lib/connection.js:45:22)
at Socket.emit (events.js:88:20)
at TCP.onread (net.js:347:14)
I think it is related to opening a new connection while the old one is still open.
Edit 2:
I've checked the Postgres logs.
After the second request:
2012-03-13 09:23:22 EET LOG: invalid length of startup packet
After the third request:
2012-03-13 09:24:48 EET FATAL: invalid frontend message type 0

It looks like client (pg.Client) is declared outside the scope of a request; this is probably your issue. It's hard to tell from the code snippet, but it also looks like you might have issues with scoping and with how async callback control flow works in general, e.g. calling res.end() while callbacks are still in the IO queue. That is totally legal with Node, I'm just not sure it is your intent.
It is preferred to use pg.connect, which returns a pooled client. See https://github.com/brianc/node-postgres/wiki/pg
var pg = require('pg');
var server = http.createServer(function (req, res) {
    var url = parse(req.url);
    if (url.pathname == '/') {
        (...)
    } else {
        var slash_index = url.pathname.indexOf('/', 1);
        var command = url.pathname.slice(1, slash_index);
        if (command == 'restart') {
            res.write('restarting server please wait');
        } else if (command == 'drop-db') {
            console.log('drop-db');
            pg.connect(conString, function(err, client) {
                console.log('connect');
                var query = client.query("select datname from pg_database;", function(err, result) {
                    if (err) throw err;
                    console.log('callback');
                });
                query.on('end', function() {
                    console.log('close');
                    // client.end(); -- not needed, client will return to the pool on drain
                });
            });
        } else {
            res.write('unknown command : ' + command);
        }
        // these shouldn't be here either if you plan to write to res from within the pg
        // callback
        res.write('\n');
        res.end();
    }
}).listen(5337);
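If you do plan to send the response only after the query completes, a minimal sketch of the drop-db branch could look like this (same pg.connect style as above; the error handling is deliberately simplistic and the messages are made up for illustration):
pg.connect(conString, function(err, client) {
    if (err) {
        res.end('could not connect to postgres\n');
        return;
    }
    client.query("select datname from pg_database;", function(err, result) {
        if (err) {
            res.end('query failed\n');
            return;
        }
        // finish the response only once the query callback has fired
        res.write(result.rows.length + ' databases found\n');
        res.end();
    });
});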

I was getting this error, similar to you, and it was that the connection wasn't closed. When you attempt to (re)connect via an already open connection, things go boom. I would suggest that you use the direct connection stuff, since you don't seem to need the pooling code - might make it easier to trap the problem. (Though, given that I've resurrected an older post, I suspect that you probably already fixed this.)
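For what it's worth, the per-request client approach suggested here could be sketched roughly like this (assuming the same pg.Client and conString setup as in the question; not a drop-in fix):
// create, use and close a dedicated client for each request
var client = new pg.Client(conString);
client.connect(function(err) {
    if (err) {
        res.end('could not connect\n');
        return;
    }
    client.query("select datname from pg_database;", function(err, result) {
        client.end(); // always close this client; a fresh one is created per request
        if (err) {
            res.end('query failed\n');
            return;
        }
        res.end('done\n');
    });
});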

Related

SailsSocket Keeps Trying to Reconnect

After struggling with socket.io connection authentication (here and here), and thanks to @sgress454, I figured out how to get this to work: I send the authentication/authorization token as part of the query in the connection (see below).
Upon authentication failure (invalid/expired token or inactive user), I return the callback with a false parameter to indicate that the connection is rejected.
On the client side, though, I am not sure how I should handle this, and the socket keeps trying to reconnect even after explicitly disconnecting.
The client code is something like this:
var _onConnectError = function(err) {
    if (err.description == 400) {
        console.log("Connection rejected");
        _disconnectAndCleanupSocket();
    } else {
        console.log("##SOCKET - connect_error", err.description, err);
    }
}
var _disconnectAndCleanupSocket = function() {
    if (io.socket) {
        if (io.socket.isConnected()) {
            io.socket.disconnect();
        }
        io.socket.removeAllListeners();
        delete io.socket;
    }
};
io.socket = io.sails.connect({ query: "token=" + token });
io.socket.on('connect', _onConnect);
io.socket.on('connect_error', _onConnectError);
io.socket.on('reconnect_error', _onConnectError);
On the server (config/sockets.js) I have:
beforeConnect: function(handshake, cb) {
    var token = handshake._query ? handshake._query.token : null;
    CipherService.verifyToken(token, function verifyTokenResults(err, decoded, info) {
        if (err || !decoded) {
            if (err.name === "TokenExpiredError") {
                // token expired - user can't connect...
                return cb(null, false);
            } else {
                // some other error...
                return cb(err, false);
            }
        }
        AuthUser.findOne(decoded.user.id).exec(function(err, user) {
            if (err || !user || !user.is_active) {
                return cb(null, false);
            }
            return cb(null, true);
        });
    });
    // (`false` would reject the connection)
},
I have tried to find documentation and explored the response object (in developer tools), but the only thing I saw there was the description field, which returns 400 on rejection and 0 when there is no response at all (e.g. the server is down).
Is there some example/documentation for this? Overall, I didn't find a detailed description of using SailsSocket in non-standard cases (other than plain io.sails.connect()).
What is the proper way to handle such a rejection (and shouldn't it be handled as part of the Sails socket.io client)?
As an aside, I cannot instantiate a SailsSocket myself and can only do it with the io.sails.connect() function. Is that on purpose? Is there no risk of "missing" an event when I create the socket with the connect method and only assign event handlers afterwards?
The short answer to your question is that you can set the reconnection flag to turn automatic reconnection on or off:
// Disable auto-reconnect for all new socket connections
io.sails.reconnection = false;
// Disable auto-reconnect for a single new socket connection
var mySocket = io.sails.connect({reconnection: false});
As far as SailsSocket creation goes, you are correct that io.sails.connect() is the only way to create a SailsSocket. Since the connection is made asynchronously, any event handlers you bind immediately after calling connect will be added before the actual connection takes place, so you won't miss any notifications.
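Putting the two together, a minimal client-side sketch (assuming the same token variable as in the question, and that the query and reconnection options can be passed in one object) might look like this:
// connect once, with auto-reconnect disabled, and bind handlers immediately
var socket = io.sails.connect({ query: "token=" + token, reconnection: false });

socket.on('connect', function() {
    console.log('connected');
});

socket.on('connect_error', function(err) {
    // with reconnection disabled the socket will not keep retrying on its own
    console.log('connection rejected or failed', err);
});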

How to stop infinite loop caused by callback in asynchronous function

I have two async functions defined in my program; one of them pings a given IP address (using the net-ping module) and 'returns' (through a callback) whether the ping was successful, and the other creates an SSH connection to a given IP address (using the ssh2 module) and also 'returns' whether the connection was successful.
My problem arises when I try to read in IP data from a data.txt file using the readline module. I read each line from the file, use the callbacks to call ping and ssh in sequence, and then place the returned values into an array to use later. I've been able to verify that the functions are executed in the order I expect, and that the values are really returned. However, when the program reaches the end of the file, instead of terminating it just reads the file again, and again, and again.
The SSH function is defined as:
function SSH(ipAdress, callback) {
    connection.on('ready', function(err) {
        if (err) {
            throw err;
            returnValue = "ErrorWithReadyConnection";
            callback(null, returnValue);
        }
        //runs the uptime command once we have SSH'd into the machine
        connection.exec('uptime', function(err, stream) {
            if (err) {
                throw err;
                returnValue = "ErrorRunningCommandFromSSH";
                callback(null, returnValue);
            }
            else {
                stream.on('close', function(code, signal) {
                    //code holds the response from running 'uptime'
                    console.log("Stream on close. Code : " + code +
                        ". Signal: " + signal);
                    connection.end();
                    returnValue = "success";
                    callback(null, returnValue);
                }).on('data', function(data) {
                    console.log("STDOUT: " + data);
                }).stderr.on('data', function(data) {
                    console.log("STDERR: " + data);
                });
            }
        });
        //Parameters for the SSH connection
    }).connect({
        host: ip,
        port: 22,
        username: userName,
        privateKey: privateKey,
        passphrase: passPhrase
    }); //connect
    //handle any connection errors whilst creating a connection
    connection.on('error', function(err) {
        if (err) {
            returnString = "ErrorWaitingForHandshake";
            callback(null, returnString);
        }
    }); //end connect
}
The ping function is defined as:
function pingFunc(ip, callback) {
    var returnString;
    //Create a ping session, passing the IP of the machine to ping
    sessions.pingHost(ip, function(error, target) {
        //if error, then examine what type of error has occurred
        if (error) {
            if (error instanceof ping.RequestTimedOutError) {
                returnString = "RequestTimedOut";
            }
            //else, different error - output the error string.
            else {
                returnString = "Error";
            }
        } //End error handling
        //else, ping was successful
        else {
            returnString = "Alive";
        }
        callback(null, returnString);
    });
    //return returnString;
}
And the code where I call the functions is:
var allMachines = new Array();
var lineReader = require('readline').createInterface({
    input: require('fs').createReadStream('data.txt')
});
lineReader.on('line', function(line) {
    pingFunc(line, function(err, pingResult) {
        SSH(line, function(err, sshResult) {
            var machineTemp = [{ 'ping': pingResult, 'ssh': sshResult }];
            allMachines = allMachines.concat(machineTemp);
        })
    })
});
I plan on later using the allMachines array to create a JSON file, but this infinite loop is halting all potential progress. I've tried moving the connection.on('error', ...) handler in the SSH function, which I think is causing the infinite loop, but this has proved fruitless.
Any help would be greatly appreciated - thanks!
P.S. If anyone knows how I can register when readline has finished and the allMachines array has been filled with the necessary data, I would also be very grateful! I've tried readline.on('close', ...), but it gets called before SSH and ping finish executing, so it is no use to me. Thanks again!
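For the P.S., one common pattern is to count outstanding operations and only act once the file has been fully read and the count drops back to zero. A rough sketch against the code above (untested, and assuming pingFunc and SSH invoke their callbacks exactly once per line):
var pending = 0;
var fileDone = false;

function maybeFinish() {
    if (fileDone && pending === 0) {
        // every line has been pinged and SSH'd; build the JSON file here
        console.log('all machines processed', allMachines);
    }
}

lineReader.on('line', function(line) {
    pending++;
    pingFunc(line, function(err, pingResult) {
        SSH(line, function(err, sshResult) {
            allMachines.push({ 'ping': pingResult, 'ssh': sshResult });
            pending--;
            maybeFinish();
        });
    });
});

lineReader.on('close', function() {
    fileDone = true;
    maybeFinish();
});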

Express app.post getting called multiple times

I have been building a YouTube video conversion app which streams YouTube videos using youtube-dl and saves them. Everything was working fine until I attempted to stream a video that was over an hour long. When the task was anywhere between 50% and 100% complete, or 40-80 seconds in, the entire block of code would get re-run, resulting in multiple simultaneous streams. The response can therefore never get sent, as it waits for the pipe to finish. Adding next(); outside the stream function allowed the conversion to complete without any interruption or reruns of the code block, but it resulted in the following error when attempting to send the response:
throw new Error('Can\'t set headers after they are sent.');
This is the Node.js code block in question:
app.post('/convert/', function (req, res, next){
    var url = 'http://www.youtube.com/' + req.body.url;
    var quality = req.body.quality;
    var socketId = req.body.socketId;
    stream = youtubedl(url,
        ['-f ' + quality],
        // Additional options can be given for calling `child_process.execFile()`.
        { cwd: __dirname });
    stream.on('info', function(info) {
        console.log('Download started');
        console.log('filename: ' + info._filename);
        console.log('size: ' + info.size);
        console.log('format: ' + info.format);
        var fileName = info._filename;
        var videoId = info.id;
        var videoTitle = info.title;
        videoTitle = videoTitle.replace(/[^a-zA-Z0-9\s]/g, '');
        console.log(videoTitle);
        var mp4 = 'downloads/' + videoTitle + '-' + info.format_id + '.mp4';
        fs.writeFile(mp4, "file", function(err) {
            if (err) {
                return console.log(err);
            }
            console.log("The file was saved!");
            var stat = fs.statSync(mp4);
            var str = progress({
                length: info.size,
                time: 100
            });
            str.on('progress', function(progress) {
                io.to(global.socketid).emit('progressVideo', {progress: progress.percentage});
                console.log(info.size);
                console.log(progress.transferred);
                console.log(progress.percentage);
                console.log(progress.remaining);
            });
            var pipe = stream.pipe(str).pipe(fs.createWriteStream(mp4));
            pipe.on('finish', function () {
                console.log("stream finished.");
                res.json(videoTitle + '-' + info.format_id + '.mp4');
            });
        });
    });
    // next();
});
Called by some Angular code.
// Sends youtube link to backend
$scope.getVideo = function(youtubeLink, resolution){
    var cleanedLink = youtubeLink.substring(24);
    var url = {url: cleanedLink, quality: resolution};
    $http.post('/convert/', url).success(function (response){
        // Do some stuff
    });
}
Confused as to why it was being run more than once, I slowly removed more and more code until I was left with this simple test.
app.post('/convert/', function (req, res, next){
    console.log('hello!');
});
This was called by an ng-click event, and after waiting a minute or so the console printed out two and then three hello! statements. I am completely lost as to why this happens. If anyone could shed some light on this for me it would be greatly appreciated.
So, after getting down to a very basic post route that just logged some text and never returned a response, I came to the conclusion that the issue resides with Node. I recorded the length of time between the console.log statements, which turned out to be two minutes every time. From this I found out that Node has a default timeout of two minutes if a response is not sent back to the client.
I was able to set the response to never timeout with the following code:
res.connection.setTimeout(0);
I hope this helps anyone else who needs to hold connections open for long periods of time for file conversions/transfers, etc.
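In the route from the question, that would mean disabling the timeout before kicking off the download, roughly like this (a sketch only; the rest of the handler stays as in the original code):
app.post('/convert/', function (req, res, next){
    // stop Node from timing the response out after the default two minutes
    res.connection.setTimeout(0);

    var url = 'http://www.youtube.com/' + req.body.url;
    // ... start the youtube-dl stream and send the response
    // from the pipe's 'finish' handler, as before
});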

Node.js process cannot recover after MySQL is turned off, then on

I am using Node.js with MySQL and restify.
I have the following code which is run as part of a REST API. It works fine.
server.get('/test', function (req, res, next) {
    var query_string =
        "SELECT DATE(date_transacted) AS transaction_date, " +
        " MonthReports.tb AS MonthReports__tb " +
        " FROM monthly_reports MonthReports " +
        " WHERE ( date_transacted >= \'2015-01-00\' AND date_transacted <= \'2015-09-00\' ) ";
    connection.query(
        query_string
        , function (err, rows, fields) {
            if (err) throw err;
            res.send(rows);
        });
});
If I deliberately turn off the MySQL database and make a REST API call which runs the query, I get the error
Cannot enqueue Query after fatal error.
At this point I turn the MySQL database back on. The Node.js process is unable to recover, and the same error keeps appearing on every subsequent REST API call. The REST API server is dead.
What can be done to make the Node.js REST API server code recoverable?
I am assuming you are connecting globally inside your script.
One simple way would be to create a connection per request:
server.get('/test', function (req, res, next) {
    var query_string =
        "SELECT DATE(date_transacted) AS transaction_date, " +
        " MonthReports.tb AS MonthReports__tb " +
        " FROM monthly_reports MonthReports " +
        " WHERE ( date_transacted >= \'2015-01-00\' AND date_transacted <= \'2015-09-00\' ) ";
    var connection = getConnection(function connected(err) {
        if (err) {
            // error connecting to mysql! alert user
        } else {
            connection.query(
                query_string
                , function (err, rows, fields) {
                    if (err) throw err;
                    res.send(rows);
                });
        }
    });
});
The above code is pseudocode, as I'm not familiar with the node mysql library. This lets each request check whether MySQL can be reached, at the expense of opening a connection per web request.
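A more concrete version of that per-request idea using the mysql package might look like the sketch below (the connection options are placeholders, and the query is an abbreviated stand-in for the one in the question):
var mysql = require('mysql');

server.get('/test', function (req, res, next) {
    var query_string = "SELECT DATE(date_transacted) AS transaction_date " +
        " FROM monthly_reports MonthReports"; // abbreviated version of the query above
    // one short-lived connection per request; the options below are placeholders
    var connection = mysql.createConnection({
        host: 'localhost',
        user: 'username',
        password: 'password',
        database: 'mydb'
    });
    connection.connect(function (err) {
        if (err) {
            res.send(500, { error: 'could not connect to MySQL' });
            return next();
        }
        connection.query(query_string, function (err, rows, fields) {
            connection.end(); // release the connection whether or not the query succeeded
            if (err) {
                res.send(500, { error: 'query failed' });
                return next();
            }
            res.send(rows);
            return next();
        });
    });
});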
Another strategy could be to check err when you issue a query, and if there is an error try to reestablish the global connection
server.get('/test', function (req, res, next) {
    var query_string =
        "SELECT DATE(date_transacted) AS transaction_date, " +
        " MonthReports.tb AS MonthReports__tb " +
        " FROM monthly_reports MonthReports " +
        " WHERE ( date_transacted >= \'2015-01-00\' AND date_transacted <= \'2015-09-00\' ) ";
    connection.query(
        query_string
        , function (err, rows, fields) {
            if (err) {
                // Try to reconnect here instead of throwing an error and stopping
                // the node process, then reissue the query
            }
            res.send(rows);
        });
});
This website gives a complete answer. Credit goes to the writer of this article, not me.
https://www.exratione.com/2013/01/nodejs-connections-will-end-close-and-otherwise-blow-up/
/**
 * @fileOverview A simple example module that exposes a getClient function.
 *
 * The client is replaced if it is disconnected.
 */
var mysql = require("mysql");
var client = mysql.createConnection({
    host: "127.0.0.1",
    database: "mydb",
    user: "username",
    password: "password"
});

/**
 * Set up a client to automatically replace itself if it is disconnected.
 *
 * @param {Connection} client
 *   A MySQL connection instance.
 */
function replaceClientOnDisconnect(client) {
    client.on("error", function (err) {
        if (!err.fatal) {
            return;
        }
        if (err.code !== "PROTOCOL_CONNECTION_LOST") {
            throw err;
        }
        // client.config is actually a ConnectionConfig instance, not the original
        // configuration. For most situations this is fine, but if you are doing
        // something more advanced with your connection configuration, then
        // you should check carefully as to whether this is actually going to do
        // what you think it should do.
        client = mysql.createConnection(client.config);
        replaceClientOnDisconnect(client);
        client.connect(function (error) {
            if (error) {
                // Well, we tried. The database has probably fallen over.
                // That's fairly fatal for most applications, so we might as
                // well call it a day and go home.
                //
                // For a real application something more sophisticated is
                // probably required here.
                process.exit(1);
            }
        });
    });
}

// And run this on every connection as soon as it is created.
replaceClientOnDisconnect(client);

/**
 * Every operation requiring a client should call this function, and not
 * hold on to the resulting client reference.
 *
 * @return {Connection}
 */
exports.getClient = function () {
    return client;
};
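Usage of that module would then look something like this (a sketch, assuming the file above is saved as db.js next to the route code and the query is just a placeholder):
var db = require('./db');

server.get('/test', function (req, res, next) {
    // always fetch the current client; it may have been replaced after a disconnect
    db.getClient().query('SELECT 1 AS ok', function (err, rows) {
        if (err) {
            return next(err);
        }
        res.send(rows);
        return next();
    });
});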
This answer was extracted from another question: nodejs mysql Error: Connection lost The server closed the connection.
The extracted code:
var db_config = {
    host: 'localhost',
    user: 'root',
    password: '',
    database: 'example'
};

var connection;

function handleDisconnect() {
    connection = mysql.createConnection(db_config); // Recreate the connection, since
                                                    // the old one cannot be reused.

    connection.connect(function(err) {              // The server is either down
        if (err) {                                  // or restarting (takes a while sometimes).
            console.log('error when connecting to db:', err);
            setTimeout(handleDisconnect, 2000);     // We introduce a delay before attempting to reconnect,
        }                                           // to avoid a hot loop, and to allow our node script to
    });                                             // process asynchronous requests in the meantime.
                                                    // If you're also serving http, display a 503 error.
    connection.on('error', function(err) {
        console.log('db error', err);
        if (err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually
            handleDisconnect();                        // lost due to either server restart, or a
        } else {                                       // connection idle timeout (the wait_timeout
            throw err;                                 // server variable configures this)
        }
    });
}

handleDisconnect();

soda.js Selenium RC 2 How to Reuse Browser Session

I'm working with soda.js, Mocha and Selenium RC. I'm trying to speed up my testing, and one way would be to stop starting a new session for each test (i.e. closing and opening a new browser and logging into the site every time).
I've seen numerous incomplete posts on various forums/message boards about reusing sessions in other languages, but my tests are all JavaScript.
Does anyone know how I can reuse the previous browser/session once I start my tests, so I don't have to start a new session in each test?
My test runner for soda looks like this:
var soda = require('soda'),
    util = require('util'),
    //config object - values injected by TeamCity
    config = {
        host: process.env['SELENIUM_HOST'] || 'localhost',
        port: process.env['SELENIUM_PORT'] || 4444,
        url: process.env['SELENIUM_SITE'] || 'http://google.com',
        browser: process.env['SELENIUM_BROWSER'] || 'firefox'
    };

describe("TEST_SITE", function(){
    beforeEach(
        function(done){
            browser = soda.createOnPointClient(config);
            // Log commands as they are fired
            browser.on('command', function(cmd, args){
                console.log(' \x1b[33m%s\x1b[0m: %s', cmd, args.join(', '));
            });
            //establish the session
            browser.session(function(err){
                done(err);
            });
        }
    );
    afterEach(function(done){
        browser.testComplete(function(err) {
            console.log('done');
            if(err) throw err;
            done();
        });
    });
    describe("Areas", function(){
        var tests = require('./areas');
        for(var test in tests){
            if(tests.hasOwnProperty(test)){
                test = tests[test];
                if(typeof( test ) == 'function')
                    test();
                else if (util.isArray(test)) {
                    for(var i = 0, l = test.length; i < l; i++){
                        if(typeof( test[i] ) == 'function')
                            test[i]();
                    }
                }
            }
        }
    });
});
I found my answer. I really needed to concentrate more on Mocha, and the answer was along these lines:
//before running the suite, create a connection to the Selenium server
before(
    function(done){
        browser = soda.createOnPointClient(config);
        // Log commands as they are fired
        browser.on('command', function(cmd, args){
            console.log(' \x1b[33m%s\x1b[0m: %s', cmd, args.join(', '));
        });
        //establish the session
        browser.session(function(err){
            done(err);
        });
    }
);

//after each test has completed, send the browser back to the main page (hopefully cleaning our environment)
afterEach(function(done){
    browser.open('/', function(){
        done();
    });
});

//after the entire suite has completed, shut down the selenium connection
after(function(done){
    browser.testComplete(function(err) {
        console.log('done');
        if(err) throw err;
        done();
    });
});
The result so far is that I'm not seeing any real performance gain from reusing the session over starting a new one. My tests still take roughly the same amount of time.
