Socket.io unable to emit data to client's unique room - javascript

I am using Node.js to create a media upload microservice. The service reads the binary data of the upload into a buffer and then uses the s3 npm package to upload it to an S3 bucket. I am trying to use the event emitter in that package, which reports the amount of data uploaded to S3, and send that back to the client doing the uploading (so that it can display upload progress). I am using socket.io to send this progress data back to the client.
The problem I am having is that the .emit event in socket.io will send the upload progress data to all connected clients, not just the client which initiated the upload. As I understand it, a socket connects to a default room on 'connection', which is mirrored by the 'id' on the client side. According to the official docs, using socket.to(id).emit() should send the data scoped only to that client, but this is not working for me.
UPDATED Example code:
server.js:
var http = require('http'),
    users = require('./data'),
    app = require('./app')(users);

var server = http.createServer(app);

server.listen(app.get('port'), function(){
  console.log('Express server listening on port ' + app.get('port'));
});
var io = require('./socket.js').listen(server);
socket.js:
var socketio = require('socket.io');

var socketConnection = exports = module.exports = {};

socketConnection.listen = function listen(app) {
  io = socketio.listen(app);
  exports.sockets = io.sockets;

  io.sockets.on('connection', function (socket) {
    socket.join(socket.id);

    socket.on('disconnect', function(){
      console.log("device " + socket.id + " disconnected");
    });

    socketConnection.upload = function upload (data) {
      socket.to(socket.id).emit('progress', {progress: (data.progressAmount/data.progressTotal)*100});
    };
  });

  return io;
};
s3upload.js:
var config = require('../config/aws.json');
var s3 = require('s3');
var path = require('path');
var fs = require('fs');
var Busboy = require('busboy');
var inspect = require('util').inspect;
var io = require('../socket.js');
...
var S3Upload = exports = module.exports = {};
....
S3Upload.upload = function upload(params) {
  // start uploading to uploader
  var uploader = client.uploadFile(params);

  uploader.on('error', function(err) {
    console.error("There was a problem uploading the file to bucket, either the params are incorrect or there is an issue with the connection: ", err.stack);
    res.json({responseHTML: "<span>There was a problem uploading the file to bucket, either the params are incorrect or there is an issue with the connection. Please refresh and try again.</span>"});
    throw new Error(err);
  });

  uploader.on('progress', function() {
    io.upload(uploader);
  });

  uploader.on('end', function(){
    S3Upload.deleteFile(params.localFile);
  });
};
When using DEBUG=* node myapp.js, I see the socket.io-parser taking in this information, but it isn't emitting it to the client:
socket.io-parser encoding packet {"type":2,"data":["progress",{"progress":95.79422221709825}],"nsp":"/"} +0ms
socket.io-parser encoded {"type":2,"data":["progress",{"progress":95.79422221709825}],"nsp":"/"} as 2["progress",{"progress":95.79422221709825}] +0ms
However, if I remove the .to portion of this code, it sends the data to the client (albeit to all clients, which will not help at all):
io.sockets.on('connection', function(socket) {
  socket.join(socket.id);
  socket.emit('progress', {progress: (data.progressAmount/data.progressTotal)*100});
});
DEBUG=* node myapp.js:
socket.io:client writing packet {"type":2,"data":["progress",{"progress":99.93823786632886}],"nsp":"/"} +1ms
socket.io-parser encoding packet {"type":2,"data":["progress",{"progress":99.93823786632886}],"nsp":"/"} +1ms
socket.io-parser encoded {"type":2,"data":["progress",{"progress":99.93823786632886}],"nsp":"/"} as 2["progress",{"progress":99.93823786632886}] +0ms
engine:socket sending packet "message" (2["progress",{"progress":99.93823786632886}]) +0ms
engine:socket flushing buffer to transport +0ms
engine:ws writing "42["progress",{"progress":99.84186540937002}]" +0ms
engine:ws writing "42["progress",{"progress":99.93823786632886}]" +0ms
What am I doing wrong here? Is there a different way to emit events from the server to only specific clients that I am missing?

The second code example you posted should work; if it does not, you should post more code.
As I understand it, a socket connects to a default room on
'connection', which is mirrored by the 'id' on the client side.
According to the official docs, using socket.to(id).emit() should send
the data scoped only to that client, but this is not working for me.
Socket.io is much simpler than that. The code below will send a 'hello' message to each client when it connects:
io.sockets.on('connection', function (socket) {
  socket.emit('hello');
});
Every time a new client connects to the socket.io server, it will run the specified callback with that particular socket as a parameter. socket.id is just a unique code identifying that socket, but you don't really need that variable for anything; the code above shows you how to send a message through a particular socket.
Socket.io also provides functions to create namespaces/rooms, so you can group connections under some identifier (the room name) and broadcast messages to all of them:
io.sockets.on('connection', function (socket) {
  // This will be triggered after the client does socket.emit('join','myRoom')
  socket.on('join', function (room) {
    socket.join(room); // Now this socket will receive all the messages broadcast to 'myRoom'
  });
  ...
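Once sockets have joined a room, emitting to the whole room from the server is just as short. A minimal sketch (the room name 'myRoom' and the event name 'news' are only examples):
// Emit to every socket that has joined 'myRoom':
io.sockets.in('myRoom').emit('news', { msg: 'hello room' });

// Or, from inside a connection handler, to everyone in the room
// except the socket that triggered it:
socket.broadcast.to('myRoom').emit('news', { msg: 'hello room' });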
Now you should see that socket.join(socket.id) simply does not make sense, because no other socket will ever share that socket id.
Edit to answer the question with the new code:
You have two problems here, first:
socketConnection.upload = function upload (data) {
  socket.to(socket.id).emit('progress', {progress: (data.progressAmount/data.progressTotal)*100});
};
Note that everything inside io.sockets.on('connection', function (socket) { ... }) runs each time a client connects to the server, so you keep overwriting socketConnection.upload to point at the socket of the most recently connected user.
The other problem is that you are not linking sockets and S3 operations. Here is a solution merging socket.js and s3upload.js into the same file. If you really need to keep them separate, you will need to find a different way to link the socket connection to the S3 operation:
var config = require('../config/aws.json');
var s3 = require('s3');
var path = require('path');
var fs = require('fs');
var Busboy = require('busboy');
var inspect = require('util').inspect;
var socketio = require('socket.io');

var socketConnection = exports = module.exports = {};
var S3Upload = exports = module.exports = {};

var io = socketio.listen(app);
exports.sockets = io.sockets;

io.sockets.on('connection', function (socket) {
  socket.on('disconnect', function(){
    console.log("device " + socket.id + " disconnected");
  });

  socket.on('upload', function (data) { // The client will trigger the upload by sending the data
    /*
    some code creating the bucket params using data
    */
    S3Upload.upload(params, this);
  });
});

S3Upload.upload = function upload(params, socket) { // Here we pass the socket so we can answer it back
  // start uploading to uploader
  var uploader = client.uploadFile(params);

  uploader.on('error', function(err) {
    console.error("There was a problem uploading the file to bucket, either the params are incorrect or there is an issue with the connection: ", err.stack);
    res.json({responseHTML: "<span>There was a problem uploading the file to bucket, either the params are incorrect or there is an issue with the connection. Please refresh and try again.</span>"});
    throw new Error(err);
  });

  uploader.on('progress', function() {
    socket.emit('progress', {progress: (uploader.progressAmount/uploader.progressTotal)*100});
  });

  uploader.on('end', function(){
    S3Upload.deleteFile(params.localFile);
  });
};

According to the documentation, every socket automatically joins a default room identified by its socket id, so there is no need for you to join it on connection. Also according to the docs, if you want to emit to a room in a namespace from a specific socket you should use socket.broadcast.to(room).emit('my message', msg), given that you want to broadcast the message to all the clients connected to that specific room.

All new connections automatically join a room whose name is equal to their socket.id. You can use it to send messages to a specific user, but you have to know the socket.id associated with the connection that user initialized. You have to decide how to manage this association (via a database, or in memory by keeping an object or array for it), but once you have it, just send the progress percentage via:
socket.broadcast.to( user_socket_id ).emit( "progress", number_or_percent );
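For example, a minimal in-memory association might look like the sketch below (the userSockets object and the 'register' event are assumptions made for illustration; with multiple processes you would need a shared store such as Redis instead):
var userSockets = {}; // application user id -> socket.id (illustrative only)

io.sockets.on('connection', function (socket) {
  // Assume the client sends socket.emit('register', myUserId) right after connecting.
  socket.on('register', function (userId) {
    userSockets[userId] = socket.id;
  });
  socket.on('disconnect', function () {
    Object.keys(userSockets).forEach(function (userId) {
      if (userSockets[userId] === socket.id) delete userSockets[userId];
    });
  });
});

// Later, when progress is known for a given user:
function sendProgress(userId, percent) {
  var socketId = userSockets[userId];
  if (socketId) {
    io.sockets.in(socketId).emit('progress', percent);
  }
}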

Related

NodeJS + Redis + WebSocket memory management?

I have a NodeJS server that hosts a WebSocket server. The WebSocket redistributes messages from Redis.
The full flow is: a Python script pushes some data into Redis, and the NodeJS WebSocket server reads the newly added Redis data and sends it to the connected clients. My problem is that NodeJS keeps consuming memory and after a while it just bursts and stops.
I don't know what my problem is, since my code is pretty simple.
I don't need my WebSocket to receive messages from the connected clients, since I only need to push data to them, but a lot of data.
var server = require('websocket').server,
    http = require('http');
var redis = require("redis"),
    client = redis.createClient();

var socket = new server({
  httpServer: http.createServer().listen(443),
  keepalive: false
});

client.subscribe("attack-map-production");

socket.on('request', function(request) {
  var connection = request.accept(null, request.origin);

  connection.on('message', function(message) {
    console.log(message);
    client.on("message", function(channel, message){
      connection.send(message);
    });
  });

  connection.on('close', function(connection) {
    console.log('connection closed');
  });
});
I'm looking to make this work without eating all the memory on my server, and possibly make it much faster, although I think it's fast enough.
Maybe NodeJS is not meant for this kind of work?
Any help is appreciated.
Thanks.
Update 2016-11-08
With the information provided below, I have "updated" my code. The problem is still there; I will continue to look around to find an answer, but I'm really not getting this.
var server = require('websocket').server,
    http = require('http');
var redis = require("redis"),
    client = redis.createClient();

var socket = new server({
  httpServer: http.createServer().listen(443),
  keepalive: false
});

client.subscribe("attack-map-production");

socket.on('request', function(request) {
  var connection = request.accept(null, request.origin);

  client.on("message", function(channel, message){
    connection.send(message);
  });

  connection.on('close', function(connection) {
    console.log('connection closed');
  });
});
Update 2016-11-16
So here is my new code:
var io = require('socket.io').listen(443);
var sub = require('redis').createClient();

io.sockets.on('connection', function (sockets) {
  sockets.emit('message', {Hello: 'World!'});
  sub.subscribe('attack-map-production'); // Could be any pattern
  sockets.on('disconnect', function() {
    sub.unsubscribe('attack-map-production');
  });
});

sub.on('message', function(channel, message) {
  io.sockets.json.send(message);
});
Even this code makes NodeJS go to 100% CPU and beyond, and it starts to go really slow, until everything just stops.
The complete flow of my data is that a Python script pushes data into Redis, and through my subscription it is pushed back to the browser by a WebSocket and Socket.io.
It's that simple, so how can this be slow? I just don't get it.
client = redis.createClient();
Take a look at this line: every time you invoke the variable client, you create an instance of a Redis client inside Node, and you never close it. So if you receive 10000 socket 'request' events, you will also have 10000 Redis instances.
You need to call client.quit() once the write or the read to Redis is done:
var server = require('websocket').server,
    http = require('http');
var redis = require("redis"),
    client = redis.createClient();

var socket = new server({
  httpServer: http.createServer().listen(443),
  keepalive: false
});

client.subscribe("attack-map-production");

socket.on('request', function(request) {
  var connection = request.accept(null, request.origin);

  client.on("message", function(channel, message){
    connection.send(message);
  });

  client.quit(); // MISSING LINE

  connection.on('close', function(connection) {
    console.log('connection closed');
  });
});
I also noticed this piece of code:
httpServer: http.createServer().listen(443)
Port 443 is for HTTPS! So if you are using a secured connection you need to use the https module, not http, like this:
var socket = new server({
  httpServer: https.createServer().listen(443),
  keepalive: false
});
Hope it helps!
Maybe NodeJS is not meant for this kind of work?
If Node is meant for anything, it's this. I/O streaming and reading/writing are the main strengths of Node's asynchronous model.
What kind of server are you running this on? On a too-small EC2 instance you can hit memory problems.
Otherwise it's a leak, and that is kind of hard to trace. The code is small, though.
I would remove any console.log, just in case.
connection.on('message', function(message) {
  console.log(message);
  client.on("message", function(channel, message){
    connection.send(message);
  });
});
This part feels suspicious: two variables with the same name, an unused variable; it calls for trouble, and I don't really see why you have to listen for a connection message in order to wait for a Redis message.
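A sketch of a leaner shape, assuming the goal is simply to fan Redis messages out to every open connection: subscribe once, keep a list of connections, and never attach a Redis listener per request (the connections array is an assumption made for illustration):
var server = require('websocket').server,
    http = require('http');
var redis = require('redis'),
    sub = redis.createClient();

var wsServer = new server({
  httpServer: http.createServer().listen(443),
  keepalive: false
});

// Track open connections instead of adding a Redis listener per request.
var connections = [];

sub.subscribe('attack-map-production');
sub.on('message', function (channel, message) {
  // One Redis listener, fanned out to all open connections.
  connections.forEach(function (connection) {
    connection.send(message);
  });
});

wsServer.on('request', function (request) {
  var connection = request.accept(null, request.origin);
  connections.push(connection);

  connection.on('close', function () {
    connections.splice(connections.indexOf(connection), 1);
  });
});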

Javascript Websocket server message broadcast to clients

I am trying to create a dummy websocket server in JavaScript to send some messages to my Android client app. The messages will be injected into the server using an HTML page (JavaScript), and will then be passed on to the Android client. I am able to connect these two clients (web and Android) to the server individually, but I am unable to achieve the flow I want, i.e. the web-based JavaScript sends a message to the running Node.js websocket server, which broadcasts this message to the Android client.
This is the code I am using for server side
var WebSocketServer = require("ws").Server;
var http = require("http");
var express = require("express");
var port = 2001;
var app = express();
app.use(express.static(__dirname + "/../"));
app.get('/someGetRequest', function(req, res, next) {
console.log('receiving get request');
});
app.post('/somePostRequest', function(req, res, next) {
console.log('receiving post request');
});
app.listen(80); //port 80 need to run as root
console.log("app listening on %d ", 80);
var server = http.createServer(app);
server.listen(port);
console.log("http server listening on %d", port);
var userId;
var wss = new WebSocketServer({
server: server
});
wss.on("connection", function(ws) {
console.info("websocket connection open");
var timestamp = new Date().getTime();
userId = timestamp;
ws.send(JSON.stringify({
msgType: "onOpenConnection",
msg: {
connectionId: timestamp
}
}));
ws.on("message", function(data, flags) {
console.log("websocket received a message");
var clientMsg = data;
ws.send(JSON.stringify({
msg: {
connectionId: userId
}
}));
console.log(clientMsg);
});
ws.on("close", function() {
console.log("websocket connection close");
});
});
console.log("websocket server created");
WebClient:
<script type="text/javascript">
  var websocketURL = 'ws://localhost:2001/';

  function startWebSocket() {
    try {
      ws = new WebSocket(websocketURL);
    } catch (e) {
      alert("Unable to connect to webserver");
    }
  }

  function sendMessage(text) {
    var message = 'Test message from webclient: ' + text;
    ws.send(message);
    alert(message);
  }

  startWebSocket();
</script>
<button onclick="sendMessage('From button1')">Button 1</button><br>
<button onclick="sendMessage('From button2')">Button 2</button><br>
Android client:
Just using socket class and its method to do further processing
s = new Socket(HOST, TCP_PORT);
Please let me know how I can pass the message generated by the web client to my Android client via the websocket server.
I am using Node.js for the websocket server implementation.
Thanks
From https://datatracker.ietf.org/doc/html/draft-hixie-thewebsocketprotocol-76
The protocol consists of an initial handshake followed by basic message framing, layered over TCP.
So, just opening a Socket on the client side isn't enough. Maybe this will help https://stackoverflow.com/a/4292671
Also take a look at http://www.elabs.se/blog/66-using-websockets-in-native-ios-and-android-apps, chapter "Android client".
If you really want to implement the WebSocket stuff yourself, take a look at https://stackoverflow.com/a/8125509 and https://www.rfc-editor.org/rfc/rfc6455
I guess I misread your question. Since the connection between the clients and the server already works, you just need to forward the messages.
First, you need to identify the WebSocket client type (Android or web). That means each client immediately sends a message stating what type of client the newly opened WebSocket connection is, and the server stores the WebSocket (ws) under that type. Once each WebSocket connection is identified and stored, you simply forward every message to the other type.
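A rough sketch of that idea on top of the existing wss server (the 'register' message shape and the clientsByType object are assumptions; adapt them to whatever protocol you choose):
var clientsByType = { web: [], android: [] };

wss.on("connection", function (ws) {
  var clientType = null;

  ws.on("message", function (data) {
    var msg = JSON.parse(data);

    // First message from each client declares its type,
    // e.g. {"msgType":"register","client":"web"}.
    if (msg.msgType === "register") {
      clientType = msg.client;
      clientsByType[clientType].push(ws);
      return;
    }

    // Every other message is forwarded to the opposite type.
    var target = clientType === "web" ? "android" : "web";
    clientsByType[target].forEach(function (peer) {
      peer.send(data);
    });
  });

  ws.on("close", function () {
    if (clientType) {
      var list = clientsByType[clientType];
      list.splice(list.indexOf(ws), 1);
    }
  });
});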
For a more specific answer, I need more information.
Should the communication be bidirectional?
Should there be multiple web and Android connections at the same time?

Send data from websocket to socket.io

I used the WebSocket interface to connect to a websocket server. What if I want to send the data I receive from that websocket server, through my WebSocket interface, to a client connected to me through an HTTP server? Should I use socket.io?
So in the end I would have socket.io attached to the HTTP server and a WebSocket interface to get the data, and whenever a message comes in it would be sent to the client through socket.io. Is that the best setup?
Code Example :
// Require HTTP module (to start server) and Socket.IO
var http = require('http'),
io = require('socket.io');
var WebSocket = require('ws');
var ws = new WebSocket('ws://localhost:5000');
// Start the server at port 8080
var server = http.createServer(function (req, res) {
// Send HTML headers and message
res.writeHead(200, {
'Content-Type': 'text/html'
});
res.end('<h1>Hello Socket Lover!</h1>');
});
server.listen(8080);
// Create a Socket.IO instance, passing it our server
var socket = io.listen(server);
ws.on('open', function open() {
ws.send('something');
});
ws.on('message', function (data, flags) {
// here the data will be sent to socket.io
});
// Add a connect listener
socket.on('connection', function (client) {
// Success! Now listen to messages to be received
client.on('message', function (event) {
console.log('Received message from client!', event);
});
client.on('disconnect', function () {
clearInterval(interval);
console.log('Server has disconnected');
});
});
Yes, your design is correct.
However, one thing that you should keep in mind is to take care of sending the message to the correct client after authentication. In my opinion, it is very easy to make this mistake, partially because of the simplicity of messaging using websockets.
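For the forwarding itself, a minimal sketch continuing the variable names from the question (the 'upstream-data' event name is an assumption):
ws.on('message', function (data, flags) {
  // Relay whatever arrives on the upstream WebSocket to every
  // Socket.IO client attached to our HTTP server.
  socket.sockets.emit('upstream-data', data);
});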

Scaling Socket.IO to multiple Node.js processes using cluster

Tearing my hair out with this one... has anyone managed to scale Socket.IO to multiple "worker" processes spawned by Node.js's cluster module?
Let's say I have the following on four worker processes (pseudo):
// on the server
var express = require('express');
var server = express();
var socket = require('socket.io');
var io = socket.listen(server);
// socket.io
io.set('store', new socket.RedisStore);
// set-up connections...
io.sockets.on('connection', function(socket) {
socket.on('join', function(rooms) {
rooms.forEach(function(room) {
socket.join(room);
});
});
socket.on('leave', function(rooms) {
rooms.forEach(function(room) {
socket.leave(room);
});
});
});
// Emit a message every second
function send() {
io.sockets.in('room').emit('data', 'howdy');
}
setInterval(send, 1000);
And on the browser...
// on the client
socket = io.connect();
socket.emit('join', ['room']);
socket.on('data', function(data){
console.log(data);
});
The problem: Every second, I'm receiving four messages, due to four separate worker processes sending the messages.
How do I ensure the message is only sent once?
Edit: In Socket.IO 1.0+, rather than setting a store with multiple Redis clients, a simpler Redis adapter module can now be used.
var io = require('socket.io')(3000);
var redis = require('socket.io-redis');
io.adapter(redis({ host: 'localhost', port: 6379 }));
The original example, shown further below, would then look more like this:
var cluster = require('cluster');
var os = require('os');
if (cluster.isMaster) {
// we create a HTTP server, but we do not use listen
// that way, we have a socket.io server that doesn't accept connections
var server = require('http').createServer();
var io = require('socket.io').listen(server);
var redis = require('socket.io-redis');
io.adapter(redis({ host: 'localhost', port: 6379 }));
setInterval(function() {
// all workers will receive this in Redis, and emit
io.emit('data', 'payload');
}, 1000);
for (var i = 0; i < os.cpus().length; i++) {
cluster.fork();
}
cluster.on('exit', function(worker, code, signal) {
console.log('worker ' + worker.process.pid + ' died');
});
}
if (cluster.isWorker) {
var express = require('express');
var app = express();
var http = require('http');
var server = http.createServer(app);
var io = require('socket.io').listen(server);
var redis = require('socket.io-redis');
io.adapter(redis({ host: 'localhost', port: 6379 }));
io.on('connection', function(socket) {
socket.emit('data', 'connected to worker: ' + cluster.worker.id);
});
app.listen(80);
}
If you have a master node that needs to publish to other Socket.IO processes, but doesn't accept socket connections itself, use socket.io-emitter instead of socket.io-redis.
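A minimal socket.io-emitter sketch, assuming Redis is running locally on its default port; it publishes straight to Redis, so the publishing process never accepts socket connections itself:
// In the master (or any publish-only process):
var emitter = require('socket.io-emitter')({ host: '127.0.0.1', port: 6379 });

setInterval(function () {
  // Workers using socket.io-redis pick this up and emit it to their clients.
  emitter.emit('data', 'payload');
}, 1000);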
If you are having trouble scaling, run your Node applications with DEBUG=*. Socket.IO now implements debug which will also print out Redis adapter debug messages. Example output:
socket.io:server initializing namespace / +0ms
socket.io:server creating engine.io instance with opts {"path":"/socket.io"} +2ms
socket.io:server attaching client serving req handler +2ms
socket.io-parser encoding packet {"type":2,"data":["event","payload"],"nsp":"/"} +0ms
socket.io-parser encoded {"type":2,"data":["event","payload"],"nsp":"/"} as 2["event","payload"] +1ms
socket.io-redis ignore same uid +0ms
If your master and child processes both display the same parser messages, then your application is properly scaling.
There shouldn't be a problem with your setup if you are emitting from a single worker. What you're doing is emitting from all four workers, and due to Redis publish/subscribe, the messages aren't duplicated, but written four times, as you asked the application to do. Here's a simple diagram of what Redis does:
Client <-- Worker 1 emit --> Redis
Client <-- Worker 2 <----------|
Client <-- Worker 3 <----------|
Client <-- Worker 4 <----------|
As you can see, when you emit from a worker, it will publish the emit to Redis, and it will be mirrored by the other workers, which have subscribed to the Redis database. This also means you can use multiple socket servers connected to the same instance, and an emit on one server will be fired on all connected servers.
With cluster, when a client connects, it will connect to one of your four workers, not all four. That also means anything you emit from that worker will only be shown once to the client. So yes, the application is scaling, but the way you're doing it, you're emitting from all four workers, and the Redis database is making it as if you were calling it four times on a single worker. If a client actually connected to all four of your socket instances, they'd be receiving sixteen messages a second, not four.
The type of socket handling depends on the type of application you're going to have. If you're going to handle clients individually, then you should have no problem, because the connection event will only fire for one worker per client. If you need a global "heartbeat", then you could have a socket handler in your master process. Since workers die when the master process dies, you should offload the connection handling from the master process and let the children handle connections. Here's an example:
var cluster = require('cluster');
var os = require('os');
if (cluster.isMaster) {
// we create a HTTP server, but we do not use listen
// that way, we have a socket.io server that doesn't accept connections
var server = require('http').createServer();
var io = require('socket.io').listen(server);
var RedisStore = require('socket.io/lib/stores/redis');
var redis = require('socket.io/node_modules/redis');
io.set('store', new RedisStore({
redisPub: redis.createClient(),
redisSub: redis.createClient(),
redisClient: redis.createClient()
}));
setInterval(function() {
// all workers will receive this in Redis, and emit
io.sockets.emit('data', 'payload');
}, 1000);
for (var i = 0; i < os.cpus().length; i++) {
cluster.fork();
}
cluster.on('exit', function(worker, code, signal) {
console.log('worker ' + worker.process.pid + ' died');
});
}
if (cluster.isWorker) {
var express = require('express');
var app = express();
var http = require('http');
var server = http.createServer(app);
var io = require('socket.io').listen(server);
var RedisStore = require('socket.io/lib/stores/redis');
var redis = require('socket.io/node_modules/redis');
io.set('store', new RedisStore({
redisPub: redis.createClient(),
redisSub: redis.createClient(),
redisClient: redis.createClient()
}));
io.sockets.on('connection', function(socket) {
socket.emit('data', 'connected to worker: ' + cluster.worker.id);
});
app.listen(80);
}
In the example, there are five Socket.IO instances, one being the master and four being the children. The master server never calls listen(), so there is no connection overhead on that process. However, if you call an emit on the master process, it will be published to Redis, and the four worker processes will perform the emit on their clients. This offloads connection load to the workers, and if a worker were to die, your main application logic would be untouched in the master.
Note that with Redis, all emits, even in a namespace or room will be processed by other worker processes as if you triggered the emit from that process. In other words, if you have two Socket.IO instances with one Redis instance, calling emit() on a socket in the first worker will send the data to its clients, while worker two will do the same as if you called the emit from that worker.
Let the master handle your heartbeat (example below), or start multiple processes on different ports internally and load balance them with nginx (which also supports websockets from v1.3 upwards).
Cluster with Master
// on the server
var express = require('express');
var server = express();
var socket = require('socket.io');
var io = socket.listen(server);
var cluster = require('cluster');
var numCPUs = require('os').cpus().length;
// socket.io
io.set('store', new socket.RedisStore);
// set-up connections...
io.sockets.on('connection', function(socket) {
socket.on('join', function(rooms) {
rooms.forEach(function(room) {
socket.join(room);
});
});
socket.on('leave', function(rooms) {
rooms.forEach(function(room) {
socket.leave(room);
});
});
});
if (cluster.isMaster) {
// Fork workers.
for (var i = 0; i < numCPUs; i++) {
cluster.fork();
}
// Emit a message every second
function send() {
console.log('howdy');
io.sockets.in('room').emit('data', 'howdy');
}
setInterval(send, 1000);
cluster.on('exit', function(worker, code, signal) {
console.log('worker ' + worker.process.pid + ' died');
});
}
This actually looks like Socket.IO succeeding at scaling. You would expect a message from one server to go to all sockets in that room, regardless of which server they happen to be connected to.
Your best bet is to have one master process that sends a message each second. You can do this by only running it if cluster.isMaster, for example.
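For example, a sketch of that guard (it assumes the Redis store/adapter from the snippets above is configured, so the workers deliver the emit to their own clients):
if (cluster.isMaster) {
  // Only the master schedules the periodic emit, so each client
  // receives it once instead of once per worker.
  setInterval(function () {
    io.sockets.in('room').emit('data', 'howdy');
  }, 1000);
}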
Inter-process communication is not enough to make socket.io 1.4.5 work with cluster. Forcing websocket mode is also a must. See WebSocket handshake in Node.JS, Socket.IO and Clusters not working.

Update all clients using Socket.io?

Is it possible to force all clients to update using socket.io? I've tried the following, but it doesn't seem to update other clients when a new client connects:
Serverside JavaScript:
I'm attempting to send a message to all clients containing the current number of connected users. It correctly sends the number of users, however the client itself doesn't seem to update until the page has been refreshed. I want this to happen in realtime.
var clients = 0;

io.sockets.on('connection', function (socket) {
  ++clients;
  socket.emit('users_count', clients);

  socket.on('disconnect', function () {
    --clients;
  });
});
Clientside JavaScript:
var socket = io.connect('http://localhost');

socket.on('connect', function(){
  socket.on('users_count', function(data){
    $('#client_count').text(data);
    console.log("Connection");
  });
});
});
It's not actually sending an update to the other clients at all, instead it's just emitting to the client that just connected (which is why you see the update when you first load)
// socket is the *current* socket of the client that just connected
socket.emit('users_count', clients);
Instead, you want to emit to all sockets
io.sockets.emit('users_count', clients);
Alternatively, you can use the broadcast function, which sends a message to everyone except the socket that starts it:
socket.broadcast.emit('users_count', clients);
I found that using socket.broadcast.emit() will only broadcast to the current "connection", whereas io.sockets.emit will broadcast to all the clients.
Here the server is listening on "two connections", which are exactly 2 socket namespaces:
io.of('/namespace').on('connection', function(socket){
  socket.broadcast.emit("hello");
});
io.of('/other namespace').on('connection', function(socket){ /*...*/ });
I have tried to use io.sockets.emit() in one namespace, but it was received by the client in the other namespace. However, socket.broadcast.emit() will just broadcast within the current socket namespace.
As of socket.io version 0.9, "emit" no longer worked for me, and I've been using "send"
Here's what I'm doing:
Server Side:
var num_of_clients = io.sockets.clients().length;
io.sockets.send(num_of_clients);
Client Side:
ws = io.connect...
ws.on('message', function(data) {
  var sampleAttributes = data.split(',');
  if (sampleAttributes[0] == "NumberOfClients") {
    console.log("number of connected clients = " + sampleAttributes[1]);
  }
});
You can follow this example for implementing your scenario.
You can let all clients join a common room for sending updates.
Every socket can join a room like this:
currentSocket.join("client-presence") //can be any name for room
Then you can keep a clients key in your socket store which contains each client's data (id and status), and if one client's status changes you can receive a change event on the socket like this:
socket.on('STATUS_CHANGE', function () {
  emitClientsPresence(io, namespace, currentSocket);
}); // event name should be the same on client & server side for catching and emitting
and now you want all other clients to get updated, so you can do something like this:
const emitClientsPresence = (io, namespace, currentSocket) => {
  io.of(namespace)
    .to('client-presence')
    .emit('STATUS_CHANGE', { id: "client 1", status: "changed status" });
};
This will emit the STATUS_CHANGE event to all sockets that have joined the "client-presence" room, and then you can catch the same event on the client side and update the other clients' status.
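On the client side, catching that event might look like this sketch (the handler body is only illustrative):
socket.on('STATUS_CHANGE', function (data) {
  console.log('client ' + data.id + ' is now: ' + data.status);
  // update that client's status indicator in the UI here
});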
According to the documentation on Broadcasting, with a Node.js server you can use this:
io.emit('event_id', {your_property: 'your_property_field'});
Be sure to initialise websocket, for example:
var express = require('express');
var http = require('http');

var app = express();
var server = http.Server(app);
var io = require('socket.io')(server);

app.get('/', function (req, res) {
  res.send('Hello World!');
  io.emit('event_hello', {message: 'Hello Socket'});
});

server.listen(3000, function () {
  console.log('Example app listening on port 3000!');
});
In this case, when a user reaches your server, an "event_hello" is broadcast to all websocket clients with the JSON object {message: 'Hello Socket'}.
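The matching client side could be as small as this sketch (it assumes the page is served by the same server and loads the default /socket.io/socket.io.js client script):
var socket = io(); // connects back to the host serving the page
socket.on('event_hello', function (data) {
  console.log(data.message); // "Hello Socket"
});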
