socket.io Get data io.sockets.clients(); Not working anymore - javascript

Previously you could create objects with user properties on the frontend and assign them to the socket.user property for each connection, using code like the example below on the backend.
socket.on("new_visitor", user => {
console.log("new_visitor", user);
socket.user = user;
emitVisitors();
});
Then you could retrieve all of this data through the sockets object, e.g.:
const getVisitors = () => {
  let clients = io.sockets.clients().connected;
  let sockets = Object.values(clients);
  let users = sockets.map(s => s.user);
  return users;
};
// frontend
componentWillMount() {
  axios.get('http://geoplugin.net/json.gp').then(res => {
    const {
      geoplugin_request,
      geoplugin_countryCode,
      geoplugin_city,
      geoplugin_region,
      geoplugin_countryName
    } = res.data;
    const visitor = {
      ip: geoplugin_request,
      countrycode: geoplugin_countryCode,
      city: geoplugin_city,
      state: geoplugin_region,
      country: geoplugin_countryName
    };
    socket.emit("new_visitor", visitor);
    socket.on("visitors", visitors => {
      this.setState({
        visitors: visitors
      });
    });
  });
}
But now io.sockets.clients is no longer available and is not recognized as a function. Every API I have found seems to return only the socket ids. If anyone knows a workaround for this, please let us know. Thanks a lot.

Problem: How to hold custom data for each socket (server-side)
For each socket that connects to your socket.io server you want to be able to store some custom data in reference to that socket, so that at a later point other sockets can retrieve this information.
Solution: Add a simple in-memory store (server-side)
I strongly advise against adding anything to, or mutating, the socket object. Instead, use the socket id to maintain a simple in-memory store for all connected sockets.
🚧 Please note: the following snippets are just pointers and are not meant to be copy-pasted as-is. Instead, use them to understand your problem and adjust them to your needs.
Server Side
const store = {};

io.on('connection', function (socket) {
  // add the socket to the store;
  // note: 'data' is null at this point and still needs to be set
  store[socket.id] = {
    socket: socket,
    data: null
  };

  socket.on('SET_CLIENT_DATA', function (clientdata) {
    // here we receive data from the frontend and add it to the server-side reference
    store[socket.id].data = clientdata;

    // once a socket updates its custom client data,
    // emit all custom data to all clients
    io.emit('ALL_CONNECTED_CLIENTS', Object.values(store).map(e => e.data));
  });

  socket.on('disconnect', function () {
    // if a socket disconnects, make sure to remove its reference from the store
    delete store[socket.id];
  });
});
Client Side
socket.emit("SET_CLIENT_DATA", clientdata);
socket.on("ALL_CONNECTED_CLIENTS", (allclients) => {
/* here the client receives all custom client data that we kept serverside for each connected client */
/* do some more code here */
});
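Applied back to the original question, the frontend would simply send the geoplugin-derived visitor object as its client data and render whatever comes back. A sketch, assuming the same socket instance and the visitor object built in componentWillMount above:

socket.emit("SET_CLIENT_DATA", visitor);
socket.on("ALL_CONNECTED_CLIENTS", allclients => {
  // each entry is the data another client registered (null until that client sets it)
  this.setState({ visitors: allclients.filter(Boolean) });
});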

Related

Show user invoices for simultaneously logged in users using Expressjs

I have created a simple invoice application using the MERN stack. The application is great at handling data for the logged-in user as long as only one user is logged in, but if another user logs in, then the invoices for the user who logged in first are shown.
I am currently using app.set() and app.get() to pass data between endpoints and send it to my frontend client. Auth0 handles the authentication layer, but would express-session solve this issue? And if so, how would I go about implementing it? Or is there a better solution?
Below is the code that sends the invoices currently to the frontend:
var express = require('express');
var app = express();
var userInvoices = express.Router();
const axios = require('axios');
const InvoiceModel = require('../models/Invoice');
const UserModel = require('../models/User');

// Functions //
async function clientCall() {
  const url = `${process.env.REACT_APP_SAVE_USER}`;
  const axiosConfig = {
    method: 'get',
    url
  };
  await axios(axiosConfig).catch((e) => { console.log(e) });
};

async function fetchUsersFromDB() {
  const usersFromDB = await UserModel.find().catch((e) => { console.log(e) });
  return usersFromDB;
};

async function saveUser(User) {
  const condition = { email: User.email };
  const query = {
    nickname: User.nickname,
    name: User.name,
    picture: User.picture,
    email: User.email,
    email_verified: User.email_verified,
    sub: User.sub,
  };
  const options = { upsert: true };
  const update = await UserModel.updateMany(condition, query, options).catch((e) => { console.log(e) });
  // Log the number of documents that were updated in the DB.
  if (update.nModified > 0) {
    console.log('Number of Users added or updated to DB:', update.nModified)
  }
};

function findCommonUser(dbUser, User) {
  if (dbUser.length <= 0) {
    UserModel.create(User, () => { console.log('Users saved to database') });
    console.log('An Error has occurred with Database')
  } else {
    dbUser.forEach((DBElement) => {
      if (User.email !== DBElement.email) {
        saveUser(User);
      }
    })
  }
  console.log('Users match')
};

function matchUserAndInvoice(dbInvoices, loggedInUser) {
  let newArray = [];
  dbInvoices.forEach(async (DBElement) => {
    if (DBElement.customer_email === loggedInUser.email) {
      newArray.push(DBElement);
      app.set('userInvoices', newArray);
    }
  })
}

// Prevents the user from having to refresh to get data.
clientCall();

userInvoices.post('/saveUser', async (req, res) => {
  try {
    const User = req.body;
    const usersFromDB = await fetchUsersFromDB().catch((e) => { console.log(e) });
    findCommonUser(usersFromDB, User);
    app.set('Users', User)
  } catch (error) {
    console.log(error)
  }
})

userInvoices.get('/fetchUserInvoices', async (req, res) => {
  try {
    const invoices = await InvoiceModel.find().catch((e) => { console.log(e) });
    const user = await app.get('Users');
    await matchUserAndInvoice(invoices, user);
    const userInvoices = await app.get('userInvoices')
    res.json(userInvoices);
  } catch (error) {
    console.log(error)
  }
});

module.exports = userInvoices;
app.get() is essentially global to your server instance, so putting data there to use between requests for individual users will (as you have discovered) get data confused between different users, because all users are trying to store data in the same place.
The usual way to solve a problem like this is to use express-session. This cookies the end-user's connection the first time they connect to your server and then creates a server-side object that is automatically associated with that cookie. Then, inside of any request handler, you can read or set data in req.session and that data will uniquely belong to just that user.
If the user changes devices or clears their cookies, then the association with the session object for that user will be lost (creating a new session object for them upon their next connection). But, if you have a persistent data store and you use some sort of login that allows you to populate the session from the user's persistent store, you can even make the session persistent across devices for the same user (though often times this is not required).
In the specific application you describe in your question (tracking invoices), it seems like your invoice data should be tagged with a specific user when it is stored in your database and then any future requests to display invoices should query based on the particular user that is making the request. This requires a login and login cookie so that you can uniquely identify each user and thus show them only the invoices that pertain to them.
The above-described session object should only be used for temporal session state, not for persistent storage such as invoices. If your server implementation is truly RESTful, you may not even need any data stored in the session object other than the user's logged-in userID.
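A minimal sketch of how that could look here, assuming express-session with its default in-memory store and the InvoiceModel from the question (route and field names are taken from the snippet above; the session secret is an assumed environment variable):

const session = require('express-session');

// register the session middleware once on the actual app, before the routers
app.use(session({
  secret: process.env.SESSION_SECRET, // assumption: provided via the environment
  resave: false,
  saveUninitialized: false
}));

// remember who this particular browser/user is, per session instead of per app
userInvoices.post('/saveUser', (req, res) => {
  req.session.userEmail = req.body.email;
  res.sendStatus(200);
});

// each request reads its own user's email back from req.session,
// so two simultaneously logged-in users no longer overwrite each other
userInvoices.get('/fetchUserInvoices', async (req, res) => {
  const invoices = await InvoiceModel.find({
    customer_email: req.session.userEmail
  });
  res.json(invoices);
});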

How to store socket id and emit to that user

Hey guys, I am working on a WebRTC app using socket.io and Node, and I have an issue: I made a database in MongoDB to store every admin who created a room, and I fetch the admin id when a user joins the room. My problem is how to store that id and emit to that specific user. I tried using io.to(socketID).emit(event, message) but it doesn't emit to the user. Please help.
my code
const users = {}
const admins = {}

io.on('connection', socket => {
  socket.on('join-class', async (classId, palsid) => {
    socket.join(classId)
    const gEtLectID = await LiveClass.findOne({ "address": classId })
    if (gEtLectID) {
      socketId = gEtLectID.startedBy
    }
    console.log(socketId, 'is admin')
    io.to(socketId).emit('user-connected', "let's play a game");
    // socket.to(admins[socket.id]).emit('user-connected', userID)

    // on disconnection
    socket.on('disconnect', () => {
      socket.to(classId).broadcast.emit('user-disconnect', palsid)
    })
  })
})
You need to handle the id on the client. When you send an event to the server, include the toId and fromId. So, on the client: var socket = io("http://127.0.0.1:3000/", { query: "id=4ny1d" });
And on the server:
io.on('connection', socket => {
  var id = socket.handshake.query['id'];
  socket.leave(socket.id); // leave the default room
  socket.join(id);         // join a custom room (this is the admin room)
  // maybe you'll want a roomId, but the steps are the same
  socket.on('join-class', data => {
    socket.join(data.classId);        // here you join the class
    socket.emit('class-ready', data); // message with class info to the subscriber
    // here you can store classId in a var or an array to use socket.to().emit()
  })
})
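With the admin sitting in a room named after its own id, emitting to that specific user is just emitting to that room. A short usage sketch (the id '4ny1d' and the event name are illustrative, not part of the original answer):

// anywhere on the server, after the rooms have been joined
const adminId = '4ny1d'; // in practice, whatever id you stored for the room's admin
io.to(adminId).emit('user-connected', "let's play a game");

// the admin client receives it like any other event
socket.on('user-connected', msg => console.log(msg));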

How do I split up my rabbitMQ code across components?

I want to split out my RabbitMQ connection code and call it across different components, so that the connection and channel are initialized only ONCE and I can use them whenever I want, instead of having to open the connection again each time I need it.
What happens right now is that I call the function below over and over, every time I want to pass something to my exchange and queue (so if I want to pass 20 individual pieces of data to RabbitMQ, I end up opening and closing both the connection and the channel 20 times).
Any solutions?
const exchange = "Exchange";
const queue = "Queue";

const passSomeData = async payload => {
  const amqp = require("amqplib").connect("amqp://localhost");
  let ch;
  let connection;
  let publish = amqp
    .then(function (conn) {
      connection = conn;
      return conn.createConfirmChannel();
    })
    .then(function (chn) {
      ch = chn;
      ch.assertQueue(queue, { durable: true });
      return ch.assertExchange(exchange, "topic", { durable: true });
    })
    .then(function () {
      const data = {
        content: "x",
        title: "y",
      };
      ch.bindQueue(queue, exchange, "routingKey");
      return ch.publish(exchange, "routingKey", Buffer.from(JSON.stringify(data)), {
        persistent: true
      });
    })
    .then(() => {
      setTimeout(function () {
        connection.close();
      }, 250);
    });
};

module.exports = passSomeData;
Answer copied from here
This is a general Javascript question and not one specific to RabbitMQ or the amqplib library.
I believe you can open a connection at the module level and use that within your passSomeData method. Or, passSomeData can lazily open a connection if the module-level "connection" variable is null, and then re-use that connection.
At some point you may need to use a connection pool, but that depends on your use-case and workload.
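A minimal sketch of that module-level, lazily opened connection, reusing the exchange and queue names from the question and amqplib's promise API:

// rabbit.js
const amqplib = require("amqplib");

const exchange = "Exchange";
const queue = "Queue";

let channelPromise = null;

function getChannel() {
  // open the connection and confirm channel only once; afterwards reuse the same promise
  if (!channelPromise) {
    channelPromise = amqplib
      .connect("amqp://localhost")
      .then(conn => conn.createConfirmChannel())
      .then(async ch => {
        await ch.assertExchange(exchange, "topic", { durable: true });
        await ch.assertQueue(queue, { durable: true });
        await ch.bindQueue(queue, exchange, "routingKey");
        return ch;
      });
  }
  return channelPromise;
}

const passSomeData = async payload => {
  const ch = await getChannel(); // no new connection per call
  ch.publish(exchange, "routingKey", Buffer.from(JSON.stringify(payload)), {
    persistent: true
  });
};

module.exports = passSomeData;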
NOTE: the RabbitMQ team monitors the rabbitmq-users mailing list and only sometimes answers questions on StackOverflow.

The correct method of utilizing connection pooling

I am currently developing a server, and one of its functions is retrieving data from a database server when the database, query, credentials, etc. are supplied by the client. I am also using socket.io to stream 100 rows at a time from the server.
Since I am not sure how many concurrent requests will be made to the database at peak, I have set up a connection pool using the mssql Node module. Below is, succinctly, what my code looks like.
socket.on('request:database', (connectionConf) => {
  let query = connectionConf.query;
  let dbAddress = connectionConf.dbAddress;
  let id = connectionConf.id;
  let password = connectionConf.password;
  let dbName = connectionConf.dbName;
  let requestedRows = parseInt(connectionConf.noOfRows);
  let createExcel = connectionConf.excel;

  const sqlConfig = {
    user: id,
    password: password,
    server: dbAddress,
    database: dbName,
    pool: {
      max: 1,
      min: 0,
      idleTimeoutMillis: 30000
    },
    options: {
      encrypt: true,
    }
  };

  let connectionPool = new sql.ConnectionPool(sqlConfig, err => {
    const request = connectionPool.request(); // create a request on the freshly created pool
    request.stream = true;
    request.query(query);

    request.on('recordset', columns => {
      // run functions with the columns
      socket.emit('response:headers', /* send results */);
      // Emitted once for each recordset in a query
    });

    request.on('row', row => {
      // run some functions
      socket.emit('result', /* send 100 rows at a time */);
      // Emitted for each row in a recordset
    });
  });
});
In the above code, the request:database socket.io event is triggered every time the client makes a query. The problem is that I am not using the pool correctly, as I am creating a new connectionPool every time the client clicks the send-query button.
So I cannot create the connection pool prior to the button click, because the connection details are provided by the client.
How do I resolve this connection pooling issue so that I can configure the DB connection with the data provided by the client and, at the same time, reuse the same pool across several end users?
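One common way to reconcile the two requirements is to cache one pool per distinct set of connection details, creating each pool lazily the first time a client asks for it. A sketch under that assumption, using the mssql APIs already shown above (the batching of 100 rows is left out):

const sql = require('mssql');
const pools = new Map(); // one pool per distinct server/database/user combination

function getPool(sqlConfig) {
  const key = `${sqlConfig.server}|${sqlConfig.database}|${sqlConfig.user}`;
  if (!pools.has(key)) {
    // store the connect() promise so concurrent requests share the same pool
    pools.set(key, new sql.ConnectionPool(sqlConfig).connect());
  }
  return pools.get(key);
}

socket.on('request:database', async connectionConf => {
  const sqlConfig = {
    user: connectionConf.id,
    password: connectionConf.password,
    server: connectionConf.dbAddress,
    database: connectionConf.dbName,
    options: { encrypt: true }
  };
  const pool = await getPool(sqlConfig); // reused on later queries with the same details
  const request = pool.request();
  request.stream = true;
  request.query(connectionConf.query);
  request.on('row', row => socket.emit('result', row));
});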

Flux and WebSockets

I'm using Flux and WebSockets in my React application, and during implementation I've encountered some problems.
Questions:
Assuming I have a set of action creators and a store for managing the WebSocket connection, and that the connection is started in an action creator (open(token)), where should I put my conn.emit calls, and how do I give other actions access to my connection object so that they can send data to the backend?
Do I have to pass it as an argument to the actions that are called in the views (e.g. TodoActions.create(conn, todo)), or is there a smarter way?
Current code is here
I'm using ES6 classes.
If I have omitted anything necessary in the gist, please let me know.
EDIT:
This is what I have concocted so far based on glortho's answer:
import { WS_URL } from "./constants/ws";
import WSActions from "./actions/ws";

class WSClient {
  constructor() {
    this.conn = null;
  }

  open(token) {
    this.conn = new WebSocket(WS_URL + "?access_token=" + token);
    this.conn.onopen = WSActions.onOpen;
    this.conn.onclose = WSActions.onClose;
    this.conn.onerror = WSActions.onError;
    this.conn.addEventListener("action", (payload) => {
      WSActions.onAction(payload);
    });
  }

  close() {
    this.conn.close();
  }

  send(msg) {
    return this.conn.send(msg);
  }
}

export default new WSClient();
You should have a singleton module (not a store or an action creator) that handles opening the socket and directing traffic through it. Then any action creator that needs to send/receive data via the socket just requires the module and makes use of its generic methods.
Here's a quick and dirty untested example (assuming you're using CommonJS):
SocketUtils.js:
var SocketActions = require('../actions/SocketActions.js');

var socket = new WebSocket(...);

// your stores will be listening for these dispatches
socket.onmessage = SocketActions.onMessage;
socket.onerror = SocketActions.onError;

module.exports = {
  send: function(msg) {
    return socket.send(msg);
  }
};
MyActionCreator.js
var SocketUtils = require('../lib/SocketUtils.js');

var MyActionCreator = {
  onSendStuff: function(msg) {
    SocketUtils.send(msg);
    // maybe dispatch something here, though the incoming data dispatch will come via SocketActions.onMessage
  }
};
Of course, in reality you'll be doing better and different things, but this gives you a sense of how you might structure it.
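For completeness, the SocketActions module that the snippet requires might look roughly like this; a sketch assuming a classic Flux dispatcher (the file paths and action type names are illustrative, not from the original answer):

// SocketActions.js
var AppDispatcher = require('../dispatcher/AppDispatcher.js');

module.exports = {
  onMessage: function(event) {
    // stores pick this up in their dispatcher callbacks
    AppDispatcher.dispatch({
      actionType: 'SOCKET_MESSAGE', // illustrative action type
      data: JSON.parse(event.data)
    });
  },
  onError: function(error) {
    AppDispatcher.dispatch({
      actionType: 'SOCKET_ERROR',
      error: error
    });
  }
};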
