readFile not running inside while loop - javascript

I am trying to send the contents of a text file through a socket connection every time the text file updates using Express:
console.log('Server running!');
var express = require('express');
var app = express();
var server = app.listen(3000);
var fs = require("fs");
var x = 0;
app.use(express.static('public'));
var socket = require('socket.io');
var io = socket(server);
io.sockets.on('connection', newConnection);
function newConnection(socket) {
console.log("New connection: " + socket.id);
while (true) {
fs.readFile('song.txt', function(err, data) {
if (err) throw err;
console.log(data);
if (data != x) {
var songdata = data;
console.log(songdata);
io.sockets.emit('update', songdata);
x = data;
} else {
console.log("Song is not different:)");
}
})
}
}
Without the while loop, everything works just fine and I receive the contents in the separate client. However, now nothing is happening, not even a console log of the data. This suggests the readFile callback is no longer running. Why?
Thanks:)

First off, some basics. node.js runs your Javascript single-threaded, so this is a single-threaded server. It can only do one thing with your Javascript at a time. But, if you program it carefully, it can scale really well and do lots of things.
Second off, you pretty much never want to do while (true) in server-side Javascript. That will just run forever and never let anything else run on your server.
Third, you are attempting to create a new version of that infinite loop every time a new client connects. That's not a correct design (even if there wasn't an infinite loop). You only need one instance of code checking the file, not N.
Now, if what you're really trying to do is to "poll" for changes in song.txt and notify the client whenever it changes, you need to pick a reasonable time delta between checks on the file and use a timer. This will check the file every so often and let your server run normally the rest of the time.
Here's a simple version that polls with setInterval():
console.log('Server code started!');
const express = require('express');
const app = express();
const server = app.listen(3000);
const fs = require("fs");
let lastSongData = 0;
app.use(express.static('public'));
const io = require('socket.io')(server);
// get initial songData for future use
// there will not be any connected clients yet so we don't need to broadcast it
try {
lastSongData = fs.readFileSync('song.txt', 'utf8');
} catch(e) {
console.log(e, "\nDidn't find song.txt on server initialization");
}
// here, we create a polling loop that notifies all connected clients
// any time the song has changed
const pollInterval = 60*1000; // 60 seconds, ideally it should be longer than this
const pollTimer = setInterval(() => {
fs.readFile('song.txt', 'utf8', (err, songData) => {
if (!err && songData !== lastSongData) {
// notify all connected clients
console.log("found changed songData");
io.emit('update', songData);
lastSongData = songData;
}
});
}, pollInterval);
io.sockets.on('connection', socket => {
console.log("New connection: " + socket.id);
});
If your songData is binary, then you will have to change both how you send the data to the client and how you compare it to the previous data, so that you are sending and receiving binary data rather than strings, and comparing buffers rather than strings.
Here's are some references on sending binary data with socket.io:
How to send binary data with socket.io?
How to send binary data from a Node.js socket.io server to a browser client?
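For the comparison itself, a minimal sketch using Buffer's built-in content comparison (buf.equals(), available in modern Node) rather than !==, which only compares object identity:
// reading without an encoding yields a Buffer, so compare contents, not references
fs.readFile('song.txt', (err, songData) => {
if (err) return;
if (!Buffer.isBuffer(lastSongData) || !songData.equals(lastSongData)) {
io.emit('update', songData); // socket.io transmits Buffers as binary
lastSongData = songData;
}
});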
A more efficient way to detect changes to the file is fs.watch(), which should notify you whenever the file changes. The feature has a number of platform caveats (it does not behave identically everywhere), so you will have to test it thoroughly on your platform to make sure it works the way you want.
console.log('Server code started!');
const express = require('express');
const app = express();
const server = app.listen(3000);
const fs = require("fs");
let lastSongData = 0;
app.use(express.static('public'));
const io = require('socket.io')(server);
// get initial songData for future use
// there will not be any connected clients yet so we don't need to broadcast it
try {
lastSongData = fs.readFileSync('song.txt', 'utf8');
} catch(e) {
console.log(e, "\nDidn't find song.txt on server initialization");
}
// ask node.js to tell us when song.txt is modified
fs.watch('song.txt', (eventType, filename) => {
// check the file for all eventTypes
fs.readFile('song.txt', 'utf8', (err, songData) => {
if (!err && songData !== lastSongData) {
// notify all connected clients
console.log("found changed songData");
lastSongData = songData;
io.emit('update', songData);
}
});
});
io.sockets.on('connection', socket => {
console.log("New connection: " + socket.id);
});
It is unclear from your original code if you need to send the songData to each new connection (whether it has recently changed or not).
If so, you can just change your connection event handler to this:
io.sockets.on('connection', socket => {
console.log("New connection: " + socket.id);
// send most recent songData to each newly connected client
if (lastSongData) {
socket.emit('update', lastSongData);
}
});

Continuously reading the file to detect changes is not a good idea. Instead you should use fs.watch(filename[, options][, listener]) to be notified when the file has changed. When a new socket connects, only that socket should have the content sent to it; broadcasting to every client is redundant.
io.sockets.on('connection', newConnection);
var filename = 'song.txt';
function update(socket) {
fs.readFile(filename, function (err, data) {
if (err) throw err;
socket.emit('update', data);
});
}
function newConnection(socket) {
console.log("New connection: " + socket.id);
update(socket); // Read and send to just this socket
}
fs.watch(filename, function () {
console.log("File changed");
update(io.sockets); // Read and send to all sockets.
});


Stop method/interval if user disconnects or condition is met

I want to create a live order page where clients can see the status of their order.
For that reason I want to run a function every 10 seconds that checks the SQL database to see whether the order is ready.
function checkOrder(socket, userid, checkinterval) {
pool.getConnection(function(err, connection) {
// Use the connection
connection.query('SELECT * FROM orders WHERE user = ' + userid + ' ORDER BY timestamp DESC', function(err, rows) {
var alldone = false;
for (var i = 0; i < rows.length; i++) {
if (rows[i]['status'] == 'completed') {
alldone = true;
} else {
alldone = false;
break;
}
}
socket.emit('order-update', rows);
connection.release();
if (alldone) {
console.log('all done');
socket.emit('execute', '$("#orderstatus").html(\'Done\');');
clearInterval(checkinterval);
}
});
});
}
var express = require('express');
var fs = require('fs');
var app = express();
var options = {
key: fs.readFileSync('privkey.pem'),
cert: fs.readFileSync('cert.pem'),
ca: fs.readFileSync("chain.pem")
};
var server = require('https').createServer(options, app);
var io = require('socket.io')(server);
var port = 443;
server.listen(port, function() {
console.log('Server listening at port %d', port);
});
io.on('connection', function(socket) {
socket.on('trackorder', function(userid) {
var checkinterval = setInterval(function() {
checkOrder(socket, userid, checkinterval);
}, 10000);
});
socket.on('disconnect', function() {
clearInterval(checkinterval);
});
});
Now I'm having issues stopping the function when either the job is completed or the client disconnects.
How could I achieve that? I suppose the clearInterval() would work inside the function, since the interval is passed in, but there is an issue with the on disconnect event handler: either checkinterval is undefined, or if I define it globally it stops the wrong interval.
How can this be done properly?
Your checkInterval variable is out of scope when the disconnect event comes. You need to move its definition up a level.
io.on('connection', function(socket) {
// checkInterval variable is declared at this scope so all event handlers can access it
var checkInterval;
socket.on('trackorder', function(userid) {
// make sure we never overwrite a checkInterval that is running
clearInterval(checkInterval);
checkInterval = setInterval(function() {
checkOrder(socket, userid, checkInterval);
}, 10000);
});
socket.on('disconnect', function() {
clearInterval(checkInterval);
});
});
In addition:
I added a guard against overwriting the checkInterval variable if you ever get the trackorder event more than once for the same client.
You misspelled checkinterval in one place.
As others have said, polling your database on behalf of every single client is a BAD design and will not scale. You need to either use database triggers (so the database tells you when something interesting changed) or have the code that modifies the database trigger the notification itself. Do not poll on behalf of every single client.
You have no error handling in either pool.getConnection() or connection.query().
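For instance, a sketch of the query from checkOrder() with those error checks added (using a ? placeholder, which also avoids building the SQL by string concatenation):
pool.getConnection(function(err, connection) {
if (err) {
console.log('getConnection failed', err);
return;
}
connection.query('SELECT * FROM orders WHERE user = ? ORDER BY timestamp DESC', [userid], function(err, rows) {
connection.release();
if (err) {
console.log('query failed', err);
return;
}
// ...the existing row processing and socket.emit() go here...
});
});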
Instead of that complicated setInterval stuff, just add a small IIFE that calls itself if the result isn't there yet. Some pseudocode:
function checkOrder(socket, userid){
//a variable pointing to the pending timer
var timer;
//when the client disconnects, stop retrying
socket.on("disconnect", ()=>clearTimeout(timer));
//a small IIFE that re-schedules itself until the order is done
(function retry(){
pool.getConnection(function(err, connection) {
//query, parse rows & notify the socket (sets alldone)
if (!alldone) //not finished yet: retry
timer = setTimeout(retry, 1000);
});
})();
}
I would say you're using a bad approach. You should go for push rather than pull.
What I mean is, emit the event when the status of the order changes. Don't put a burden on your database by hitting it frequently for no reason.
On a successful status change, emit the event order_status_update with the order id and the new status:
socket.emit('order_status_update', {order_id: 57, status: 'In Process'});
This way you don't need any kind of loop or setInterval. No worries about whether the client is connected or not; it's socket.io's business to take care of that. You just raise the event.
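A minimal sketch of that push approach (the room naming and the update function are assumptions for illustration, not from the question):
// each client joins a room for its user id instead of starting a poll
io.on('connection', function(socket) {
socket.on('trackorder', function(userid) {
socket.join('user-' + userid);
});
});
// wherever the status actually changes, push the event from right there
function setOrderStatus(orderId, userid, status) {
pool.query('UPDATE orders SET status = ? WHERE id = ?', [status, orderId], function(err) {
if (err) return console.log('update failed', err);
io.to('user-' + userid).emit('order_status_update', {order_id: orderId, status: status});
});
}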

In expressjs how would I limit the number of users who can make a request at the same time?

My code is just a regular app:
app
.use(sassMiddleware({
src: __dirname + '/sass',
dest: __dirname + '/',
// This line controls sass log output
debug: false,
outputStyle: 'compressed'
}))
// More libraries
...
.get('/', auth.protected, function (req, res) {
res.sendfile(__dirname + '/views/index.html');
})
.post('/dostuff', auth.protected, function (req, res) {
console.log(req.body)
res.redirect('back')
child = require('child_process').spawn(
'./script.rb',
['arguments'],
{ stdio: ['ignore', 'pipe', 'pipe'] }
);
child.stdout.pipe(process.stdout);
child.stderr.pipe(process.stdout);
})
My original goal is to limit the number of processes that /dostuff can spawn to a single instance. I was thinking there might be a simple way to limit the number of users on the entire app, but I can't seem to find any.
I was looking for some session-limiting mechanism but can't find one either, only various rate limiters, and I don't think that's what I want.
Since the app is running in Docker, I limited the number of TCP connections on the port using iptables, but this has proven to be less than ideal since the app retains some connections in the ESTABLISHED state, which prevents an efficient hand-off from one user to another.
So... any programmatic way of doing this?
UPDATE
The app is not an API server. /dostuff is actually triggered by the user from a webpage. That's why simple rate limiting is not the best option. Also, the execution times of the Ruby script are variable.
ANSWER
Based on the answer below from @jfriend00, by fixing a couple of logical errors I came up with:
.post('/dostuff*', auth.protected, function (req, res) {
console.log(req.body)
if (spawnCntr >= spawnLimit) {
res.status(502).send('Server is temporarily busy');
console.log("You already have process running. Please either abort the current run or wait until it completes".red)
return;
}
let childClosed = false
function done () {
if (!childClosed) {
--spawnCntr;
childClosed = true;
}
}
++spawnCntr;
let child = require('child_process').spawn(
blahblah
);
child.on('close', done);
child.on('error', done);
child.on('exit', done);
child.stdout.pipe(process.stdout);
child.stderr.pipe(process.stdout);
res.redirect('back');
})
I am still going to accept his answer; although incomplete, it helped a lot.
You can keep a simple counter of how many spawn() operations are in process at the same time and if a new request comes in and you are currently over that limit, you can just return a 502 error (server temporarily busy).
let spawnCntr = 0;
const spawnLimit = 1;
app.post('/dostuff', auth.protected, function (req, res) {
console.log(req.body)
if (spawnCntr > spawnLimit) {
return res.status(502).send("Server temporarily busy");
}
let childClosed = false;
function done() {
// make sure we count it closing only once
if (!childClosed) {
--spawnCntr;
childClosed = true;
}
}
++spawnCntr;
let child = require('child_process').spawn(
'./script.rb',
['arguments'],
{ stdio: ['ignore', 'pipe', 'pipe'] }
);
// capture all the ways we could know it's done
child.on('close', done);
child.on('error', done);
child.on('exit', done);
child.stdout.pipe(process.stdout);
child.stderr.pipe(process.stdout);
res.redirect('back');
});
Note: The code in your question does not declare child as a local variable, which looks like a bug waiting to happen.
You can use the node package Express Rate Limit - https://www.npmjs.com/package/express-rate-limit.
For an API-only server where the rate-limiter should be applied to all requests:
var RateLimit = require('express-rate-limit');
app.enable('trust proxy'); // only if you're behind a reverse proxy (Heroku, Bluemix, AWS if you use an ELB, custom Nginx setup, etc)
var limiter = new RateLimit({
windowMs: 15*60*1000, // 15 minutes
max: 100, // limit each IP to 100 requests per windowMs
delayMs: 0 // disable delaying - full speed until the max limit is reached
});
// apply to all requests
app.use(limiter);
For a "regular" web server (e.g. anything that uses express.static()), where the rate-limiter should only apply to certain requests:
var RateLimit = require('express-rate-limit');
app.enable('trust proxy'); // only if you're behind a reverse proxy (Heroku, Bluemix, AWS if you use an ELB, custom Nginx setup, etc)
var apiLimiter = new RateLimit({
windowMs: 15*60*1000, // 15 minutes
max: 100,
delayMs: 0 // disabled
});
// only apply to requests that begin with /api/
app.use('/api/', apiLimiter);

Node js get and set data from different process

I have a Node.js application which spawns a child process for another application;
that application has a host and port:
var exec = require('child_process').spawn;
var child = exec('start app');
console.log("Child Proc ID " + child.pid)
child.stdout.on('data', function(data) {
console.log('stdout: ' + data);
});
child.stderr.on('data', function(data) {
console.log('stderr: ' + data);
});
child.on('close', function(code) {
console.log('closing code: ' + code);
});
Some applications will start immediately and some will take 10-20 seconds until they are up.
Now I use node-http-proxy to route to the app, and the problem is that I'm getting an error when the user wants to use the app before it is up and running.
Any idea how I can solve this issue?
proxy.on('error', function (err, req, res) {
res.end('Cannot run app');
});
Btw, I cannot send a 500 response in the proxy error handler due to a limitation of our framework. Any other idea how I can track the application, maybe with some timeout, to see whether it sends a 200 response?
UPDATE - Sample of my logic
httpProxy = require('http-proxy');
var proxy = httpProxy.createProxyServer({});
http.createServer(function (req, res) {
console.log("App proxy new port is: " + 5000)
res.end("Request received on " + 5000);
}).listen(5000);
function proxyRequest(req, res) {
var hostname = req.headers.host.split(":")[0];
proxy.web(req, res, {
target: 'http://' + hostname + ':' + 5000
});
proxy.on('error', function (err, req, res) {
res.end('Cannot run app');
});
}
What you need is to listen for the first response on your proxy and look at its status code to determine whether your app started successfully or not. Here's how you do that:
proxy.on('proxyRes', function (proxyRes, req, res) {
// Your target app is definitely up and running now because it just sent a response;
// Use the status code now to determine whether the app started successfully or not
var status = proxyRes.statusCode;
});
Hope this helps.
Not sure if it makes sense, but in your main app the experience should start with an HTML page, and each child process should have its own loader.
So basically, you need an HTTP handler which holds the request until the child process is ready. Just make an Ajax call from the HTML and show a loading animation until the service is ready.
//Ajax call for each process and update the UI accordingly
$.get('/services/status/100').then(function(resp) {
$('#service-100').html(resp.responseText);
})
//server side code (express syntax)
app.get('/services/status/:id', function(req,res) {
// Check if service is ready
serviceManager.isReady(req.params.id, function(err, serviceStats) {
if(err) {
//handle the error here, maybe notify the UI that an error occurred
res.send(err);
return;
}
// notify the ui , that the service is ready to run , and hide loader
res.send(serviceStats);
});
})
I am not sure I understand the question correctly, but you want to spin up a child process on request, and you want this request to wait for the child process and then be sent to it?
If that is so, a simple solution would be to use something like this:
var async = require('async'); // assumes the async utility library
var count = 0;//Counter to check
var maxDelay = 45;//In Seconds
var checkEverySeconds = 1;//In seconds
async.whilst(
function () {
return count < maxDelay;
},
function (callback) {
count++;
self.getAppStatus(apiKey, function (err, status) {
if (status != 200) {
return setTimeout(callback, checkEverySeconds * 1000);
}
continueWithRequest();
});
},
function (err) {
if (err) {
return continueWithRequest(new Error('process cannot spin!'));
}
}
);
The function continueWithRequest() will forward the request to the child process, and getAppStatus() will return 200 when the child process has started and some other code when it has not. The general idea is that whilst will check every second whether the process is running, and if it still isn't after 45 seconds it will return an error. The waiting time and check interval can easily be adjusted. This is a bit crude, but it will work for delaying the request; setTimeout starts a new stack and will not block. Hope this helps.
If you know (around) how much time the app takes to get up and running, simply add setTimeout(proxyRequest, <Time it takes the app to start in MS>)
(There are most likely smarter/more complicated solutions, but this one is the easiest.)
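A rough sketch, assuming proxyRequest from the question's update and an app that takes about 15 seconds to start:
function delayedProxyRequest(req, res) {
// wait roughly the app's startup time, then forward the request as usual
setTimeout(function() {
proxyRequest(req, res);
}, 15 * 1000);
}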
Why not use event-emitter or messenger?
var EventEmitter = require('events').EventEmitter
var eventEmitter = new EventEmitter()
var childStart = require('./someChildProcess').start()
if (childStart !== true) {
eventEmitter.emit('progNotRun', {
data: data
})
}
function proxyRequest(req, res) {
var hostname = req.headers.host.split(":")[0];
proxy.web(req, res, {
target: 'http://' + hostname + ':' + 5000
});
eventEmitter.on('progNotRun', function(data) {
res.end('Cannot run app: ' + data);
})
}

How to check if a queue already exists or not in node.js amqp client?

I have the following scenario. I want to implement the pub/sub model of RabbitMQ using the amqp client. I have subscribers who subscribe for a particular id, and I want to form a queue named after that id, so that all the subscribers who want that id subscribe to that queue. In other words, I want one unique queue per id, shared by all the subscribers with that id. But in my case it is forming different queues for the same id (I guess). As a result, subscribers of a particular id receive the messages in a round-robin manner, instead of all the subscribers receiving all the messages. Below is the code I am using.
var express = require('express');
var amqp = require('amqp');
var app = express();
var httpServer = require('http').createServer();
var socketio = require('socket.io');
var io = socketio.listen(httpServer);
httpServer.listen(8000);
console.log("server started at port no 8000");
var rabbitConnection = amqp.createConnection({host:"localhost", port:"5672"});
var chatExchange;
rabbitConnection.on("ready", function()
{
console.log("RabbitConnection got ready");
chatExchange = rabbitConnection.exchange("chatExchange", {"type":"topic"});
//console.log("value of chatExchange is :",chatExchange);
});
io.sockets.on("connection", function(socket)
{
console.log("socket connected");
socket.on("metaData", function(data)
{
if(data.type=="publisher")
{
console.log("publisher connected");
socket.type="publisher";
}
else if(data.type=="subscriber")
{
console.log("subscriber connected");
socket.type="subscriber";
rabbitConnection.queue(""+data.channelName, function(queue) //MAIN PROBLEM LIES IN ABOVE LINE I GUESS. IT WILL FORM QUEUE FOR EVERY SOCKET CONNECTION. BUT IT SHOULD NOT FORM QUEUE IF IT ALREADY EXISTS.
{
console.log("queue formed");
console.log("queue is ", queue);
queue.bind("chatExchange",""+data.channelName);
queue.subscribe(function(message)
{
console.log("message is "+message.data.toString());
console.log("message emitted");
socket.emit("message",{"text":message.data.toString()});
});
});
}
});
socket.on("disconnect",function()
{
console.log("socket disconnected");
});
socket.on("message", function(data)
{
chatExchange.publish(""+data.channelName, data.text);
console.log("message came");
});
})
I have seen the documentation of the node-amqp module, but I did not find any option to check the existence of a queue. There was some 'persistence' option, but I don't think it will work. Please help.
Thanks in advance..
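For what it's worth, the usual way to get every subscriber to see every message is not to check whether the queue exists, but to give each subscriber its own exclusive, server-named queue bound to the same routing key. A minimal sketch, assuming node-amqp lets RabbitMQ generate the name when given an empty string:
// one queue per subscriber: "" lets RabbitMQ generate a unique name, and
// binding each queue to the same routing key delivers every message to every
// subscriber, instead of round-robin consumption from one shared queue
rabbitConnection.queue("", {exclusive: true}, function(queue)
{
queue.bind("chatExchange", ""+data.channelName);
queue.subscribe(function(message)
{
socket.emit("message", {"text":message.data.toString()});
});
});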

Nodejs Cluster with socket.io loses connections

I am using Node.js with the cluster module in order to create a multi-process socket.io server.
Using this example I have created the following server-client application:
Server:
var cluster = require('cluster');
var net = require('net');
// Start master process
if (cluster.isMaster) {
// Create slave workers (in this example there is only one)
var worker = cluster.fork();
// Starts a server that will delegate the connection to the worker
var routerServer = net.createServer(function(connection) {
worker.send({type: 'new_connection'}, connection);
});
routerServer.listen(443, "my-ip-address");
} else {
// Start a server on random Port
var slaveHttp = require("http").Server();
slaveHttp.listen(0, "localhost");
// Connect socket.io to the server
var io = require('socket.io')(slaveHttp);
io.on('connection', function (socket) {
console.log("Connected");
});
// On master message (new connection), emit it to the worker's server
process.on('message', function(message, handle) {
if (message.type == 'new_connection') {
slaveHttp.emit('connection', handle);
};
});
}
Client:
var io = require('socket.io-client');
function connect(index) {
var connection = io.connect("http://my-ip-address", {
reconnection: true,
reconnectionDelay: 1000,
timeout: 3000,
multiplex: false });
connection.on('connect', function (socket) {
console.log(index + ': Connected');
});
connection.on('connect_error', function (e) {
console.log(index + ': Connection Error: ' + e);
});
connection.on('message', function () {
console.log(index + ': Server message');
});
connection.on('disconnect', function () {
console.log(index + ': disconnect');
});
}
for (var i = 0; i < 10; i++) {
setTimeout(function(index) {
return function() { connect(index); }
}(i), i * 5000);
};
The problem is that some of the clients in the above example manage to connect to the server and exchange messages, but the others fail with a timeout. Weirder than that, I can see in the server's console that it received the connections for the timed-out clients, but for some reason they don't succeed in communicating.
If I change the server code to run everything in the same process, it runs perfectly.
Can anyone help me?
I found out this is a bug in Node.js. The problem is that there is an auto-read mechanism in Node.js sockets that starts reading from every new socket automatically.
When we pass the socket to the child process, it may or may not have been read from already. Under heavy load there is a higher chance that the socket will be read by the master (which of course is what causes the problem), so we should explicitly ask it to stop reading from the socket.
The following line is a nice workaround (it should be added just before the worker.send call):
connection._handle.readStop();
Source: https://github.com/joyent/node/issues/7905
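For context, a minimal sketch of where that line goes in the server code from the question:
var routerServer = net.createServer(function(connection) {
// stop the master's auto-read before handing the socket to the worker,
// so that no data is consumed in the master process
connection._handle.readStop();
worker.send({type: 'new_connection'}, connection);
});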
