This is my application, app.js:
/** Express **/
var express = require('express');
/** Create express application **/
var app = express();
/** Set application port **/
app.set('port', process.env.PORT || 3000);
/** Set application view engine**/
var handlebars = require('express-handlebars').create({
    defaultLayout: 'main',
    helpers: {
        section: function(name, options){
            if(!this._sections) this._sections = {};
            this._sections[name] = options.fn(this);
            return null;
        },
        parrot: function(options){
            return options.fn(this) + ' <b> parrot </b>';
        }
    }
});
app.engine('handlebars', handlebars.engine);
app.set('view engine', 'handlebars');
/** Cluster logger **/
app.use(function(req, res, next){
    var cluster = require('cluster');
    if(cluster.isWorker) console.log('CLUSTER: Worker %d received request.', cluster.worker.id);
    next();
});
/** home page **/
app.get('/', function(req, res){
    res.send('Welcome !!');
});
/** about page **/
app.get('/about', function(req, res){
    res.send('About us!');
});
/** contact page **/
app.get('/contact', function(req, res){
    res.send('contact us here');
});
// startServer in export/direct mode
function startServer(){
    app.listen(app.get('port'), function(){
        console.log('Parrot started in ' + app.get('env') + ' mode on http://localhost:' +
            app.get('port') +
            '; \n press Ctrl-C to terminate');
    });
}
if(require.main === module){
    startServer();
} else {
    module.exports = startServer;
}
And this is parrot.js (which uses the cluster module):
// import cluster
var cluster = require('cluster');
// startWorker
function startWorker(){
    var worker = cluster.fork();
    console.log('CLUSTER: Worker %d started', worker.id);
}
if(cluster.isMaster){
    // the master forks one worker per CPU
    require('os').cpus().forEach(function(){
        startWorker();
    });
    cluster.on('disconnect', function(worker){
        console.log('CLUSTER: Worker %d disconnected from the cluster', worker.id);
    });
    cluster.on('exit', function(worker, code, signal){
        console.log('CLUSTER: Worker %d died with exit code %d (%s)', worker.id, code, signal);
        startWorker();
    });
} else {
    // a worker (not the master): run the app directly
    require('./app.js')();
}
The problem is that when I run node app.js, the app works just fine on http://localhost:3000 and the page works great in the browser.
When I run it as a set of clustered workers (with node parrot.js), everything looks good in the console:
CLUSTER: Worker 1 started
CLUSTER: Worker 2 started
Parrot started in development mode on http://localhost:3000;
press Ctrl-C to terminate
Parrot started in development mode on http://localhost:3000;
press Ctrl-C to terminate
But the page loads forever and nothing shows in the browser. I don't know what the problem is here. Sorry for my language; I'm a Node.js newbie.
Thank you
I don't know exactly what the problem is, but when I tested on another computer (with a 32-bit OS), the example above worked without any problems.
This is my output now when I visit a page in the browser:
CLUSTER: Worker 1 started
CLUSTER: Worker 2 started
Parrot started in development mode on http://localhost:3333;
press Ctrl-C to terminate
Parrot started in development mode on http://localhost:3333;
press Ctrl-C to terminate
CLUSTER: Worker 2 received request.
CLUSTER: Worker 2 received request.
So, in case clustering doesn't work for you, test on a different machine.
Another question I have: I don't know why all requests are served by cluster worker 2 (the last one started); it seems worker 1 never receives any requests.
Thanks
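How requests get distributed depends on the cluster scheduling policy: since Node 0.12 the master hands connections out round-robin on most platforms, but on Windows the operating system decides which worker gets each connection, and it often keeps feeding the same one. If you want to experiment, the cluster API lets you force round-robin explicitly; a minimal sketch, to be placed in parrot.js before any cluster.fork() call:
var cluster = require('cluster');
// SCHED_RR = round-robin in the master; SCHED_NONE leaves the choice to the OS.
// This must be set before the first cluster.fork() call; the
// NODE_CLUSTER_SCHED_POLICY=rr environment variable does the same thing.
cluster.schedulingPolicy = cluster.SCHED_RR;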
Here is the problem: when I load the page in the browser and check whether my "test" event was emitted, I run into a wall of spamming polling requests.
The code I use is exactly the same as in other projects I have done, so it makes no sense to me that this doesn't work now. -_-
app.js
var express = require('express');
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io')(server);
var port = process.env.PORT || 8000;
server.listen(port, function(){
    console.log('server ready - listening on *:' + port);
});
app.get('/*', function(req, res, next){
    // This is the current file they have requested
    var file = req.params[0];
    // Send the requesting client the file.
    res.sendFile(__dirname + '/' + file);
});
io.on('connection', function (socket) {
    socket.on('test', function(){
        console.log("test worked");
    });
});
client.js
var socket = io();
socket.emit("test");
I broke the code down to what you see above. There's nothing else, and it doesn't work. internal screaming
I'll post my comment as an answer so you can wrap up this question. One common reason socket.io will loop with HTTP polling requests and never successfully connect is mismatched versions on the client and server. This seems to have happened with a recent socket.io release, so they must have changed the connection logic in a way that fails when the versions are mismatched.
If you load your client via this:
<script src="/socket.io/socket.io.js"></script>
Then the client will always match the server version, as long as you don't have any other <script> tags loading some other version of socket.io.
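As a quick sanity check, a minimal matched pair looks something like this (a sketch; socket.io automatically serves its own matching client bundle at /socket.io/socket.io.js):
// server.js - minimal sketch; the matching client file is served automatically
var server = require('http').createServer();
var io = require('socket.io')(server);
io.on('connection', function (socket) {
    console.log('client connected:', socket.id);
});
server.listen(8000);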
One more solution, which worked for me (Socket.IO 2.3.0 and Socket.IO Client 2.3.0), is to set the transports field when you create the io instance on the back-end and the socket on the front-end, like this:
On the back-end:
io = require('socket.io')(http, {
    log: false,
    agent: false,
    origins: '*:*', // this is for the CORS browser error; I also use the cors npm module here
    transports: ['websocket']
});
And on the front-end:
const socket = socketIOClient(url, {
    forceNew: false,
    secure: true,
    transports: ['websocket']
});
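Forcing transports: ['websocket'] skips the HTTP long-polling handshake entirely, which is why the polling spam stops. If you still want polling as a fallback for clients behind restrictive proxies, both transports can be listed in order of preference (a sketch):
const socket = socketIOClient(url, {
    // try WebSocket first, fall back to HTTP long-polling if it fails
    transports: ['websocket', 'polling']
});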
Hope it helps; if not the question owner, then maybe others :)
I am new to Node.js... and to programming. I have tried to get this bit of code to work and I cannot understand why it does not. What's worse, I do not know how to troubleshoot it either. If I use console.log statements, I can see that once I launch the webpage it DOES connect, but the webpage never gets a message from the Node.js server and the server never gets a message from the webpage. I am using the Chrome browser.
server.js:
var express = require('express'),
    app = express(),
    server = require('http').createServer(app),
    io = require('socket.io').listen(server);
server.listen(80);
app.use(express.static(__dirname + '/public'));
app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});
var SerialPort = require('serialport');
var portName = process.argv[2];
var sp = new SerialPort(portName, {
    baudRate: 9600,
    dataBits: 8,
    parity: 'none',
    stopBits: 1,
    flowControl: false
});
// note: the built-in event is named 'connection'; 'connected' would never fire
io.sockets.on('connection', function (socket) {
    socket.emit('connected', {
        data: 'connected'
    });
    socket.on('connected', function (data) {
        console.log(data);
        //Code
        console.log('Sending Packet. Contents:');
        sp.write(packet);
        console.log(packet);
        console.log('Packet Sent');
    });
});
I launch it from the command prompt on a Raspberry Pi Zero W:
sudo node server.js /dev/ttyACM0
The index.html references interface.js. Here is the top part of interface.js:
$(document).ready(function() {
    // Connect to the node.js server. Gets server's local ip.
    // Using this method robot can only be connected to on
    // local network.
    var ip = location.host;
    var socket = io.connect(ip); // Connects to server
    // Upon establishing a connection to the socket.io server...
    socket.on('connected', function (data) {
        console.log(data);
        // Send out a message to the server
        socket.emit('connected', { command: 'nothing' });
    });
When I put console.log statements in interface.js, I see them up until the socket.on statement.
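One low-level check that can help here (a sketch; io.engine is the underlying engine.io server that socket.io exposes) is to log the raw handshake on the server side, since it fires even when the application-level events are miswired:
// server.js - log every successful engine.io handshake
io.engine.on('connection', function (rawSocket) {
    console.log('engine.io handshake from', rawSocket.id);
});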
node -v
v6.4.0
npm -v
5.3.0
npm list socket.io
socket.io@2.0.3
uname -m
armv6l
Edit: Updated the messaging commands. Same issue.
Well, it turns out I had the wrong version of socket.io.js. So. That was a week of learning. Thanks for the help.
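For anyone hitting the same wall: the version installed on the server can be printed directly (a sketch), and the safest way to keep the client in step is to load /socket.io/socket.io.js from the server itself rather than from a CDN or a copied file:
// On the server: print the socket.io version actually installed
console.log('socket.io', require('socket.io/package.json').version);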
I'd like to start my Node.js application on boot, so I start it as a systemd service:
[Unit]
Description=Node.js server
After=network.target

[Service]
ExecStart=/usr/bin/node /var/www/Raspberry-Pi-Status/js/server.js
Restart=always
RestartSec=10
StandardOutput=syslog
StandardError=syslog
SyslogIdentifier=nodejs-server
Environment=NODE_ENV=production PORT=8000
Environment=PYTHONPATH=/usr/bin/python

[Install]
WantedBy=multi-user.target
The server.js looks like this:
var util = require('util'),
    spawn = require('child_process').spawn,
    py = spawn('python', ['temperature.py'], {detached: true});
var mysql = require('mysql');
var connection = mysql.createConnection({
    host     : 'localhost',
    user     : 'monitor',
    password : 'password',
    database : 'temps'
});
var app = require('http').createServer(handler),
    io = require('socket.io').listen(app),
    fs = require('fs'),
    sys = require('util'),
    exec = require('child_process').exec,
    child;
// Listen on port 8000
app.listen(8000);
// If all goes well when you open the browser, load the index.html file
function handler(req, res) {
    fs.readFile(__dirname + '/../index.html', function(err, data) {
        if (err) {
            // On error, log it and send a 500 response
            console.log(err);
            res.writeHead(500);
            return res.end('Error loading index.html');
        }
        res.writeHead(200);
        res.end(data);
    });
}
py.stdout.on('data', function(data){
    console.log('testing');
    date = new Date().getTime();
    temp = parseFloat(data);
    io.sockets.emit('temperatureUpdate', date, temp);
});
// When we open the browser, establish a connection to socket.io.
// Every 5 seconds send the graph a new value.
io.sockets.on('connection', function(socket) {
    console.log('user connected');
});
The Node.js application should start a Python script which reads out a temperature sensor. When I start Node.js from the console, everything works fine. However, when I start it from systemd, the Python script is not spawned.
What's the problem there? Am I missing something?
Thanks in advance
Alexander
An issue could be the difference in the current working directory when run manually vs. under systemd. The spawn call used is documented to default to inheriting the current working directory.
When run via the shell, that is whatever directory you are currently in. In man systemd.exec you find the WorkingDirectory= directive, which documents systemd's default current working directory: it "defaults to the root directory when systemd is running as a system instance".
So if your temperature.py is located in /var/www/Raspberry-Pi-Status, then set
WorkingDirectory=/var/www/Raspberry-Pi-Status in your [Service] section.
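Alternatively, the spawn call can be made independent of the working directory by resolving the script path in server.js itself (a sketch, assuming temperature.py sits next to server.js; adjust the path if it lives elsewhere):
var path = require('path');
var spawn = require('child_process').spawn;
// resolve relative to this file instead of the process's working directory
var py = spawn('python', [path.join(__dirname, 'temperature.py')], {detached: true});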
I have an Express.js (version 4.x) server, and I need to stop it correctly.
As some requests can take a long time (1-2 seconds), I must reject new connections and wait for all in-progress requests to finish. Killing the server in the middle of a task could leave it in an unstable state.
I've tried the following code:
// Start
var app = express();
var server = https.createServer(options, app);
server.listen(3000);
// Stop
process.on('SIGINT', function() {
    server.close();
});
However, this code doesn't close keep-alive connections, and some clients can hold their connection open for a long time.
So, how can I properly close all connections?
You could use some middleware to block new requests if the server is being shut down.
var app = express(),
    shuttingDown = false;
app.use(function(req, res, next) {
    if(shuttingDown) {
        // answer instead of silently dropping the request, and ask the
        // client to close its keep-alive connection
        res.set('Connection', 'close');
        return res.status(503).send('Server is shutting down');
    }
    next();
});
var server = https.createServer(options, app);
server.listen(3000);
process.on('SIGINT', function() {
    shuttingDown = true;
    server.close(function(){
        process.exit();
    });
});
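Note that server.close() only stops new connections from being accepted; an idle keep-alive socket that never sends another request can still hold the process open. One common extension (a sketch, replacing the SIGINT handler above) is to track open sockets and destroy any stragglers after a grace period:
var sockets = new Set();
server.on('connection', function(socket) {
    sockets.add(socket);
    socket.on('close', function() { sockets.delete(socket); });
});
process.on('SIGINT', function() {
    shuttingDown = true;
    server.close(function() { process.exit(); });
    // give in-flight requests a few seconds, then force-close what's left
    setTimeout(function() {
        sockets.forEach(function(socket) { socket.destroy(); });
    }, 5000).unref();
});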
I use the Node.js cluster module with 8 workers. Whenever I visit the application, I get connected to the same worker (which is normal, since one worker can handle multiple clients).
For testing purposes, I'd like to connect to different workers without having to siege the application. Is there a way to do that?
For example, going to mywebsite.com/3 would connect to the 3rd worker.
Here is a port-based solution:
var cluster = require('cluster');
var http = require('http');
if (cluster.isMaster) {
    cluster.fork();
    cluster.fork();
    cluster.fork();
    return;
}
function app(req, res) {
    res.writeHead(200);
    res.end('hello from ' + cluster.worker.id);
}
// every worker listens on the shared port 8000 plus a private per-worker port
http.createServer(app).listen(8000);
http.createServer(app).listen(8000 + cluster.worker.id);
For example, if you wish to connect to worker 2, use port 8002. Port 8000 is still shared by all workers, so requests there can be handled by any of them.
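If you'd rather keep a single shared port and just see which worker handled a given request, a response header works too (a sketch, replacing the app function above):
function app(req, res) {
    // expose the serving worker's id on every response
    res.writeHead(200, {'X-Worker-Id': String(cluster.worker.id)});
    res.end('hello from ' + cluster.worker.id);
}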