Nodejs Cluster with socket.io loses connections - javascript

I am using node.js with the cluster module in order to create a multiprocess socket.io server.
Using this example I have created the following server-client application:
Server:
var cluster = require('cluster');
var net = require('net');

// Start master process
if (cluster.isMaster) {
    // Create slave workers (in this example there is only one)
    var worker = cluster.fork();
    // Start a server that will delegate each connection to the worker
    var routerServer = net.createServer(function(connection) {
        worker.send({type: 'new_connection'}, connection);
    });
    routerServer.listen(443, "my-ip-address");
} else {
    // Start a server on a random port
    var slaveHttp = require("http").Server();
    slaveHttp.listen(0, "localhost");
    // Connect socket.io to the server
    var io = require('socket.io')(slaveHttp);
    io.on('connection', function (socket) {
        console.log("Connected");
    });
    // On master message (new connection), emit it to the local server
    process.on('message', function(message, handle) {
        if (message.type == 'new_connection') {
            slaveHttp.emit('connection', handle);
        }
    });
}
Client:
var io = require('socket.io-client');

function connect(index) {
    var connection = io.connect("http://my-ip-address", {
        reconnection: true,
        reconnectionDelay: 1000,
        timeout: 3000,
        multiplex: false
    });
    connection.on('connect', function () {
        console.log(index + ': Connected');
    });
    connection.on('connect_error', function (e) {
        console.log(index + ': Connection Error: ' + e);
    });
    connection.on('message', function () {
        console.log(index + ': Server message');
    });
    connection.on('disconnect', function () {
        console.log(index + ': disconnect');
    });
}
for (var i = 0; i < 10; i++) {
    setTimeout((function(index) {
        return function() { connect(index); };
    })(i), i * 5000);
}
The problem is that some of the clients in the above example manage to connect to the server and exchange messages, but the others fail with a timeout. Weirder still, I can see in the server's console that it received the connections for the timed-out clients, but for some reason they never manage to communicate.
If I replace the server code so that everything runs in the same process, the code runs perfectly.
Can anyone help me?

I found out this is a bug in Node.js: there is an auto-read mechanism in Node.js sockets that automatically starts reading from every new socket.
When we pass the socket to the child process, it may or may not have been read from already; under heavy load there is a greater chance that the socket will be read by the master (which of course causes the problem), so we should explicitly stop reading from this socket.
The following line is a nice workaround (it should be added just before the worker.send call):
connection._handle.readStop();
Source: https://github.com/joyent/node/issues/7905
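For context, a minimal sketch of the master section with the workaround applied (the rest of the server is unchanged from above):
var routerServer = net.createServer(function(connection) {
    // Stop the master's auto-read before handing the socket off,
    // so the master cannot consume bytes that belong to the worker
    connection._handle.readStop();
    worker.send({type: 'new_connection'}, connection);
});
routerServer.listen(443, "my-ip-address");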

Related

Node.js: Socket.io emit method not firing

I'm trying to use socket.io to have a client send a message to the server, and then the server sends some info back. Here's what my code looks like:
Server Side
let express = require('express');
let app = express();
let server = app.listen(3000);
app.use(express.static('public'));
let socket = require('socket.io');
let io = socket(server);
let playerData = [[],[],[],[],[]];

io.sockets.on('connection', (socket) => {
    console.log(socket.id);
    socket.on("joinRoom", (data) => {
        socket.join(data.room);
        console.log("received join request by socket: " + socket.id + " " + data.class);
        let intRoom = parseInt(data.room); // parses what room to join (from string to int)
        socket.to(data.room).emit('newJoin', {id: socket.id, class: data.class}); // Tells room that a new user has joined
        socket.emit("success", "You have successfully joined a lobby");
        socket.emit('addPlayers', playerData[intRoom]); // Sends player the data of all other players in the room
        playerData[intRoom].push(data.player);
    });
});
Client Side
if (enteringRoom) {
    // This if statement is being run every frame
    if (player.loaded) {
        console.log('telling server to join room'); // This fires
        socket.emit('joinRoom', {room: lobbySelect.value, class: characterSelect.value, player: player});
        enteringRoom = false;
    }
    camera.position.x = 0; // updates the position of a camera
    camera.position.z = -50;
}

socket.on('newJoin', (data) => {
    console.log('someone has joined the room');
    console.log(data);
});

socket.on('success', (res) => {
    console.log(res);
});

socket.on('addPlayers', (data) => {
    console.log("Adding Players...");
    for (let i = 0; i < data.length; i++) {
        console.log(i);
        console.log("ADDING A PLAYER");
        console.log(data[i]);
        data[i].loadAssets();
        playersInLobby += 1;
    }
    console.log(playersInLobby);
});
The problem is that nothing in the joinRoom event actually activates on the server end. None of the console logs even go off. The server however is able to console.log the socket.id on connection; that part works. Any ideas why it's not working?
Edit: This also causes the browser tab to freeze up and eventually crash

readFile not running inside while loop

I am trying to send the contents of a text file through a socket connection every time the text file updates using Express:
console.log('Server running!');
var express = require('express');
var app = express();
var server = app.listen(3000);
var fs = require("fs");
var x = 0;
app.use(express.static('public'));
var socket = require('socket.io');
var io = socket(server);

io.sockets.on('connection', newConnection);

function newConnection(socket) {
    console.log("New connection: " + socket.id);
    while (true) {
        fs.readFile('song.txt', function(err, data) {
            if (err) throw err;
            console.log(data);
            if (data != x) {
                var songdata = data;
                console.log(songdata);
                io.sockets.emit('update', songdata);
                x = data;
            } else {
                console.log("Song is not different:)");
            }
        });
    }
}
Without the while loop, everything works just fine and I receive the contents in the separate client. However, now nothing is happening, not even a console log of the data. This indicates the readFile is suddenly no longer running. Why?
Thanks:)
First off, some basics: node.js runs your Javascript single-threaded, so this is a single-threaded server. It can only do one thing with your Javascript at a time. But, if you program it carefully, it can scale really well and do lots of things.
Second off, you pretty much never want to do while (true) in server-side Javascript. That's just going to run forever and never let anything else run on your server.
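Here is a minimal illustration of that blocking behavior (a demo only, not part of the fix):
// The timer below never fires because the infinite loop
// never yields the single Javascript thread back to the event loop
setTimeout(function() { console.log('this never runs'); }, 100);
while (true) {}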
Third, you are attempting to create a new version of that infinite loop every time a new client connects. That's not a correct design (even if there wasn't an infinite loop). You only need one instance of code checking the file, not N.
Now, if what you're really trying to do is to "poll" for changes in song.txt and notify the client whenever it changes, you need to pick a reasonable time delta between checks on the file and use a timer. This will check that file every so often and let your server run normally the rest of the time.
Here's a simple version that polls with setInterval():
console.log('Server code started!');
const express = require('express');
const app = express();
const server = app.listen(3000);
const fs = require("fs");
let lastSongData = 0;
app.use(express.static('public'));
const io = require('socket.io')(server);

// get initial songData for future use
// there will not be any connected clients yet so we don't need to broadcast it
try {
    lastSongData = fs.readFileSync('song.txt');
} catch(e) {
    console.log(e, "\nDidn't find song.txt on server initialization");
}

// here, we create a polling loop that notifies all connected clients
// any time the song has changed
const pollInterval = 60 * 1000; // 60 seconds; ideally it should be longer than this
const pollTimer = setInterval(() => {
    fs.readFile('song.txt', (err, songData) => {
        if (!err && songData !== lastSongData) {
            // notify all connected clients
            console.log("found changed songData");
            io.emit('update', songData);
            lastSongData = songData;
        }
    });
}, pollInterval);

io.sockets.on('connection', socket => {
    console.log("New connection: " + socket.id);
});
If your songData is binary, then you will have to change how you send the data to the client and how you compare it to the previous data, so that you are sending and receiving binary data (not string data) and comparing buffers, not strings.
Here are some references on sending binary data with socket.io:
How to send binary data with socket.io?
How to send binary data from a Node.js socket.io server to a browser client?
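For the buffer case, here is a hedged sketch of the comparison part (assuming song.txt is read without an encoding, so fs.readFile yields a Buffer, and using Node's Buffer.equals() to compare contents):
fs.readFile('song.txt', (err, songData) => {
    // songData is a Buffer here; !== would compare object identity, not contents
    if (!err && (!Buffer.isBuffer(lastSongData) || !songData.equals(lastSongData))) {
        console.log("found changed songData");
        io.emit('update', songData);
        lastSongData = songData;
    }
});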
A little more efficient way to detect changes to the file is to use fs.watch(), which will notify you of changes to the file. The feature has a number of platform caveats (it does not work identically on all platforms), so you have to test it thoroughly on your platform to make sure it works the way you want.
console.log('Server code started!');
const express = require('express');
const app = express();
const server = app.listen(3000);
const fs = require("fs");
let lastSongData = 0;
app.use(express.static('public'));
const io = require('socket.io')(server);

// get initial songData for future use
// there will not be any connected clients yet so we don't need to broadcast it
try {
    lastSongData = fs.readFileSync('song.txt');
} catch(e) {
    console.log(e, "\nDidn't find song.txt on server initialization");
}

// ask node.js to tell us when song.txt is modified
fs.watch('song.txt', (eventType, filename) => {
    // check the file for all eventTypes
    fs.readFile('song.txt', (err, songData) => {
        if (!err && songData !== lastSongData) {
            // notify all connected clients
            console.log("found changed songData");
            lastSongData = songData;
            io.emit('update', songData);
        }
    });
});

io.sockets.on('connection', socket => {
    console.log("New connection: " + socket.id);
});
It is unclear from your original code if you need to send the songData to each new connection (whether it has recently changed or not).
If so, you can just change your connection event handler to this:
io.sockets.on('connection', socket => {
    console.log("New connection: " + socket.id);
    // send most recent songData to each newly connected client
    if (lastSongData) {
        socket.emit('update', lastSongData);
    }
});
Continuously reading the file to detect changes is not a good idea. Instead you should use fs.watch(filename[, options][, listener]) to notify you when the file has changed. When a new socket connects, only that socket should have the content sent to it; broadcasting it to every client is redundant.
io.sockets.on('connection', newConnection);

var filename = 'song.txt';

function update(socket) {
    fs.readFile(filename, function (err, data) {
        if (err) throw err;
        socket.emit('update', data);
    });
}

function newConnection(socket) {
    console.log("New connection: " + socket.id);
    update(socket); // Read and send to just this socket
}

fs.watch(filename, function () {
    console.log("File changed");
    update(io.sockets); // Read and send to all sockets.
});

Stop method/interval if user disconnects or condition is met

I want to create a live order page where clients can see the status of their order.
For that reason I want to run a function every 10 seconds that checks the SQL database if the order is ready.
function checkOrder(socket, userid, checkinterval) {
    pool.getConnection(function(err, connection) {
        // Use the connection
        connection.query('SELECT * FROM orders WHERE user = ' + userid + ' ORDER BY timestamp DESC', function(err, rows) {
            var alldone = false;
            for (var i = 0; i < rows.length; i++) {
                if (rows[i]['status'] == 'completed') {
                    alldone = true;
                } else {
                    alldone = false;
                    break;
                }
            }
            socket.emit('order-update', rows);
            connection.release();
            if (alldone) {
                console.log('all done');
                socket.emit('execute', '$("#orderstatus").html(\'Done\');');
                clearInterval(checkinterval);
            }
        });
    });
}
var fs = require('fs');
var express = require('express');
var app = express();
var options = {
    key: fs.readFileSync('privkey.pem'),
    cert: fs.readFileSync('cert.pem'),
    ca: fs.readFileSync("chain.pem")
};
var server = require('https').createServer(options, app);
var io = require('socket.io')(server);
var port = 443;

server.listen(port, function() {
    console.log('Server listening at port %d', port);
});

io.on('connection', function(socket) {
    socket.on('trackorder', function(userid) {
        var checkinterval = setInterval(function() {
            checkOrder(socket, userid, checkinterval);
        }, 10000);
    });
    socket.on('disconnect', function() {
        clearInterval(checkinterval);
    });
});
Now I'm having issues stopping the function when either the job is completed or the client disconnects.
How could I achieve that? I suppose clearInterval() would work inside the function, since the interval is passed in, but there is an issue with the on-disconnect event handler: either checkinterval is undefined, or, if I define it globally, it stops the wrong interval.
How can this be done properly?
Your checkInterval variable is out of scope when the disconnect event comes. You need to move its definition up a level.
io.on('connection', function(socket) {
    // checkInterval variable is declared at this scope so all event handlers can access it
    var checkInterval;
    socket.on('trackorder', function(userid) {
        // make sure we never overwrite a checkInterval that is running
        clearInterval(checkInterval);
        checkInterval = setInterval(function() {
            checkOrder(socket, userid, checkInterval);
        }, 10000);
    });
    socket.on('disconnect', function() {
        clearInterval(checkInterval);
    });
});
In addition:
I added a guard against overwriting the checkInterval variable if you ever get the trackorder event more than once for the same client.
You misspelled checkinterval in one place.
As others have said, polling your database on behalf of every single client is a BAD design and will not scale. You need to either use database triggers (so the database tells you when something interesting changed) or have the code that makes relevant changes to the database also trigger a notification. Do not poll on behalf of every single client.
You have no error handling in either pool.getConnection() or connection.query().
Instead of that complicated setInterval stuff, just add a small IIFE that calls itself if the result isn't there yet. Some pseudocode:
function checkOrder(socket, userid) {
    // a variable pointing to the running timer
    var timer;
    // on disconnect, stop retrying
    socket.on("disconnect", () => clearTimeout(timer));
    // a small IIFE that retries until the order is done
    (function retry() {
        pool.getConnection(function(err, connection) {
            // query, parse & notify the socket here, setting alldone as before
            if (!alldone) // retry
                timer = setTimeout(retry, 1000);
        });
    })();
}
I would say you're using a bad approach. You should go for push rather than pull.
What I mean is: emit the event when the status of an order changes. Don't put the burden on your database by hitting it frequently for no reason.
On a successful status change, emit the event order_status_update with the order id and the new status:
socket.emit('order_status_update', {order_id: 57, status: 'In Process'});
This way you don't need any kind of loop or setInterval. No worries about whether the client is connected or not; it's socket.io's business to take care of that. You just raise the event.
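As a hedged sketch of this push approach (the updateOrderStatus helper and the UPDATE query are illustrative assumptions, not part of the original code):
// Wherever the order status actually changes, emit right after the write succeeds
function updateOrderStatus(orderId, newStatus) {
    pool.getConnection(function(err, connection) {
        if (err) return console.error(err);
        connection.query('UPDATE orders SET status = ? WHERE id = ?', [newStatus, orderId], function(err) {
            connection.release();
            if (err) return console.error(err);
            // push the change to clients instead of making them poll
            io.emit('order_status_update', {order_id: orderId, status: newStatus});
        });
    });
}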

Node js get and set data from different process

I have a Node application that spawns a child process (another application, which has its own host and port):
var exec = require('child_process').spawn;
var child = exec('start app');
console.log("Child Proc ID " + child.pid);

child.stdout.on('data', function(data) {
    console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
    console.log('stderr: ' + data);
});
child.on('close', function(code) {
    console.log('closing code: ' + code);
});
Some applications start immediately and some take 10-20 seconds until they are up.
I use node-http-proxy to route requests to the app, and the problem is that I'm getting an error when the user tries to use the app before it is up and running.
Any idea how I can solve this issue?
proxy.on('error', function (err, req, res) {
    res.end('Cannot run app');
});
Btw, I cannot send a 500 response on proxy error due to a limitation of our framework. Any other idea how I can track the application, maybe with some timeout, to see whether it sends a 200 response?
UPDATE - Sample of my logic
var http = require('http');
var httpProxy = require('http-proxy');
var proxy = httpProxy.createProxyServer({});

http.createServer(function (req, res) {
    console.log("App proxy new port is: " + 5000);
    res.end("Request received on " + 5000);
}).listen(5000);

function proxyRequest(req, res) {
    var hostname = req.headers.host.split(":")[0];
    proxy.web(req, res, {
        target: 'http://' + hostname + ':' + 5000
    });
    proxy.on('error', function (err, req, res) {
        res.end('Cannot run app');
    });
}
What you need is to listen for the first response on your proxy and look at its status code to determine whether your app started successfully or not. Here's how you do that:
proxy.on('proxyRes', function (proxyRes, req, res) {
    // Your target app is definitely up and running now because it just sent a response;
    // use the status code now to determine whether the app started successfully or not
    // (read it from proxyRes, the target's response, not from res)
    var status = proxyRes.statusCode;
});
Hope this helps.
Not sure if it makes sense, but in your main app the experience should start with an HTML page, and each child process should have its own loader.
So basically, you need an HTTP handler that holds the request until the child process is ready. Just make an Ajax call from the HTML and show a loading animation until the service is ready.
// Ajax call for each process; update the UI accordingly
$.get('/services/status/100').then(function(resp) {
    $('#service-100').html(resp.responseText);
});

// server side code (express syntax)
app.get('/services/status/:id', function(req, res) {
    // Check if the service is ready
    serviceManager.isReady(req.params.id, function(err, serviceStats) {
        if (err) {
            // handle the error here; maybe notify the UI that an error occurred
            res.send(err);
            return;
        }
        // notify the UI that the service is ready to run, and hide the loader
        res.send(serviceStats);
    });
});
I am not sure I understand the question correctly, but you want a request to wait for a child process to spin up and then be sent to it?
If that is so, a simple solution would be to use something like this:
var async = require('async');
var count = 0; // Counter to check
var maxDelay = 45; // In seconds
var checkEverySeconds = 1; // In seconds
var started = false;

async.whilst(
    function () {
        return !started && count < maxDelay;
    },
    function (callback) {
        count++;
        self.getAppStatus(apiKey, function (err, status) {
            if (status != 200) {
                return setTimeout(callback, checkEverySeconds * 1000);
            }
            started = true;
            callback();
        });
    },
    function (err) {
        if (err || !started) {
            return continueWithRequest(new Error('process cannot spin!'));
        }
        continueWithRequest();
    }
);
The function continueWithRequest() will forward the request to the child process, and getAppStatus will return 200 when the child process has started (and some other code when it has not). The general idea is that whilst will check every second whether the process is running, and after 45 seconds it will give up and return an error. Waiting time and check intervals can be easily adjusted. This is a bit crude, but it will work for delaying the request; setTimeout starts a new stack and will not block. Hope this helps.
If you know (roughly) how much time the app takes to get up and running, simply add setTimeout(proxyRequest, <time it takes the app to start in ms>).
(There are most likely smarter/more complicated solutions, but this one is the easiest.)
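As a minimal sketch of that suggestion (assuming the app needs about 15 seconds to boot; the server and proxyRequest come from the sample above):
http.createServer(function (req, res) {
    // delay proxying by roughly the app's startup time (assumed 15 seconds here)
    setTimeout(function () {
        proxyRequest(req, res);
    }, 15000);
}).listen(5000);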
Why not use event-emitter or messenger?
// using Node's built-in events module; the npm 'event-emitter' package
// exports a factory function, not an emitter instance
var EventEmitter = require('events').EventEmitter;
var eventEmitter = new EventEmitter();
var childStart = require('./someChildProcess').start();

if (childStart !== true) {
    eventEmitter.emit('progNotRun', {
        data: data // whatever error info the failed start produced
    });
}

function proxyRequest(req, res) {
    var hostname = req.headers.host.split(":")[0];
    proxy.web(req, res, {
        target: 'http://' + hostname + ':' + 5000
    });
    eventEmitter.on('progNotRun', function(data) {
        res.end('Cannot run app');
    });
}

node.js + socket.io + multiple database calls needed for result

I need help.
I've been trying to wrap my head around async programming with node.js and socket.io for a day now. I understand that I need some flow control but I don't seem to understand how to properly implement it.
I have a redis datastore where modules are stored in a set, let's say 'moda' and 'modb'.
The instances of those modules are stored in 'moda:instances' and 'modb:instances', and the properties of each instance are stored as a hash in 'moda:instancea' and 'modb:instanceb'.
I am trying to get the following JSON:
{"moda": {"instancea": {"property1": "value1", "property2": "value2"}}, "modb": {"instanceb": {"property1": "value1"}}}
Could someone give me a little push in the right direction?
Here is my current code:
var io = require('socket.io').listen(2000);
var redis = require('redis').createClient();
var http = require('http');
var async = require('async');
var step = require('step');

io.sockets.on('connection', function (socket) {
    var notifications = require('redis').createClient();
    notifications.subscribe("notification");
    notifications.on("message", function (channel, message) {
        socket.send(message);
        console.log(channel + ':' + message);
    });
    socket.on('modules', function(params, callback) {
        var response = {};
        async.series([
            function (callback) {
                console.log('1>');
                redis.smembers('modules', function (err, modules) {
                    async.forEachSeries(modules, function(module, moduleCallback) {
                        response[module] = {};
                        redis.smembers(module + ':instances', function(err, instances) {
                            async.forEachSeries(instances, function(instance, instanceCallback) {
                                response[module][instance] = {};
                                console.log('2>' + module + ':' + instance);
                                instanceCallback();
                            });
                            moduleCallback();
                        });
                    });
                    callback();
                });
            },
            function (callback) {
                console.log('3');
                callback();
            }
        ], function() {
            console.log(JSON.stringify(response));
        });
    });
});
The output from this code is:
info - socket.io started
debug - client authorized
info - handshake authorized JMMn1I8aiOMGCMPOhC11
debug - setting request GET /socket.io/1/websocket/JMMn1I8aiOMGCMPOhC11
debug - set heartbeat interval for client JMMn1I8aiOMGCMPOhC11
debug - client authorized for
debug - websocket writing 1::
1>
3
{"moda":{}}
2>moda:instancea
2>moda:instanceb
2>modb:instancea
The problem comes from the fact forEachSeries requires an additional callback as a third parameter (to be called when the whole processing is complete). You cannot just put some code after forEachSeries hoping it will be called once it is complete.
Here is your code modified:
var response = {};
async.series([
    function (callback) {
        console.log('1>');
        redis.smembers('modules', function (err, modules) {
            async.forEachSeries(modules, function(module, moduleCallback) {
                response[module] = {};
                redis.smembers(module + ':instances', function(err, instances) {
                    async.forEachSeries(instances, function(instance, instanceCallback) {
                        response[module][instance] = {};
                        console.log('2>' + module + ':' + instance);
                        instanceCallback();
                    }, moduleCallback);
                });
            }, callback);
        });
    },
    function (callback) {
        console.log('3');
        callback();
    }
], function() {
    console.log(JSON.stringify(response));
});
Note how callback and moduleCallback are used as the third parameter. The output is:
1>
2>moda:instancea
2>moda:instanceb
2>modb:instancea
3
{"moda":{"instancea":{},"instanceb":{}},"modb":{"instancea":{}}}
which I guess is what you expected.
Additional remark: forEachSeries will process everything in sequence, the next operation waiting for the previous one to complete. This will generate plenty of roundtrips to Redis. forEach should be much more efficient here to leverage pipelining.
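For instance, a hedged sketch of the first series step using the parallel async.forEach (the inner population is synchronous, so a plain Array forEach suffices):
redis.smembers('modules', function (err, modules) {
    async.forEach(modules, function(module, moduleCallback) {
        response[module] = {};
        redis.smembers(module + ':instances', function(err, instances) {
            instances.forEach(function(instance) {
                response[module][instance] = {};
            });
            moduleCallback();
        });
    }, callback);
});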
Also take a look at the promise concept; with promises you can configure the flow in a much more readable way.
First you need to prepare promise versions of the redis methods that you're using:
var promisify = require('deferred').promisify;
var RedisClient = require('redis').RedisClient;
RedisClient.prototype.psmembers = promisify(RedisClient.prototype.smembers);
Then you can construct your flow as:
var response = {};
console.log('1>');
redis.psmembers('modules').map(function (module) {
    response[module] = {};
    return redis.psmembers(module + ':instances').map(function (instance) {
        response[module][instance] = {};
        console.log('2>' + module + ':' + instance);
    });
}).end(function () {
    console.log('3');
    console.log(JSON.stringify(response));
});
Check: https://github.com/medikoo/deferred
