PAHO js: more than one connection not possible? - javascript

I connect to my Mosquitto broker with the Paho JavaScript client. If I connect with a second client, the first one gets disconnected. With the reconnect timeout of about 2 seconds the connections keep kicking each other off back and forth, but can this be the right solution?
var client = new Paho.MQTT.Client("192.168.5.100", 9880, "/", "mybro");
var reconnectTimeout = 2500;

function connectMqtt() {
    console.log("connecting to mqtt ...");
    try {
        client.connect({
            timeout: 3,
            onSuccess: onConnect,
            useSSL: false,
            userName: "user",
            password: "password",
            keepAliveInterval: 30,
            reconnect: false
        });
    } catch (error) {
        console.log(error);
    }
    client.onConnectionLost = onConnectionLost;
    client.onMessageArrived = onMessageArrived;
}

function onConnect() {
    try {
        client.subscribe("shellies/#");
        client.subscribe("openWB/#");
        console.log("Connected!");
    } catch (error) {
        console.log(error);
    }
}

function onConnectionLost(responseObject) {
    if (responseObject.errorCode !== 0) {
        console.log("onConnectionLost:" + responseObject.errorMessage);
        setTimeout(connectMqtt, reconnectTimeout);
    }
}

function onMessageArrived(message) {
    setDataToGui(message.destinationName, message.payloadString);
}
What did I try? Everything I found on the internet. There must be a problem in my code.
I need to connect with more than one web browser (client).

var client = new Paho.MQTT.Client("192.168.5.100", 9880, "/", "mybro");
The problem is the last argument in this call. It is the client ID, which must be unique for EVERY client connecting to the broker.
You will need to generate that value, most likely as a random number or a very high precision timestamp, to limit the chances of 2 clients generating the same value.
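For example, a minimal sketch of the idea (the "mybro-" prefix and the random suffix are only illustrative):
var clientId = "mybro-" + Math.random().toString(16).slice(2, 10);
// or: var clientId = "mybro-" + Date.now();
var client = new Paho.MQTT.Client("192.168.5.100", 9880, "/", clientId);
Each browser tab that loads the page now presents a different client ID, so the broker no longer kicks the existing session when a second one connects.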

Related

React Websocket gets inactive after some time

I am using the Azure Pub-Sub service for the chat module in a React application, and I am creating this connection using a WebSocket.
let ws = new WebSocket(token.url);
ws.onmessage = (data) => {
    // Messages logic
};
When I am in other tabs, or in the same tab for a longer time (more than 40-45 minutes), I stop receiving messages, but when I refresh the page and the WebSocket initialization code runs again, I receive messages again. Any suggestions?
Use this technique:
function connect() {
    var ws = new WebSocket('ws://localhost:8080');
    ws.onopen = function() {
        // subscribe to some channels
        ws.send(JSON.stringify({
            // .... some message that I must send when I connect ....
        }));
    };
    ws.onclose = function(e) {
        console.log('Socket is closed. Reconnect will be attempted in 1 second.', e.reason);
        setTimeout(function() {
            connect();
        }, 1000);
    };
}
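To make the reconnect robust, you can also close the socket on error inside connect(), right after the other handlers, so that onclose always fires and schedules a retry; this addition is only a sketch on top of the snippet above:
    ws.onerror = function(err) {
        console.error('Socket encountered an error, closing it', err);
        ws.close(); // triggers onclose, which schedules the reconnect
    };
Call connect() once on page load to open the first connection.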

Nodejs + SocketIO + MySql Connections Not Closing Properly and Creating Database Overhead

I've been having this issue for over a couple of months now, and still can't seem to figure out how to fix it. I'm experiencing a high number of connections to our database, and I assume it's because our connections aren't closing properly, which causes them to hang for long periods of time. In turn this causes a lot of overhead, which occasionally causes our web application to crash. Currently the application uses the promise-mysql npm package to create a connection and query the database. Our web application uses Socket.IO to request these connections to our MySQL database.
I'm working with existing code that was here before me, so I did not set it up this way. This makes it a bit more confusing for me to debug this issue because I'm not that familiar with how the connections get closed after a successful / unsuccessful query.
When logging errors from our server I'm getting messages like this:
db error { Error: Connection lost: The server closed the connection.
at Protocol.end (/home/ec2-user/myapp/node_modules/mysql/lib/protocol/Protocol.js:113:13)
at Socket.<anonymous> (/home/ec2-user/myapp/node_modules/mysql/lib/Connection.js:109:28)
at Socket.emit (events.js:185:15)
at Socket.emit (domain.js:422:20)
at endReadableNT (_stream_readable.js:1106:12)
at process._tickCallback (internal/process/next_tick.js:178:19) fatal: true, code: 'PROTOCOL_CONNECTION_LOST' }
(Not sure if that has anything to do with the high number of connections I'm seeing or not)
I recently changed wait_timeout and interactive_timeout to 5000 in MySQL, which is way lower than the default 28800; setting them that low made the application crash less often.
This is the code for creating the database connection:
database.js file
import mysql from 'promise-mysql';
import env from '../../../env.config.json';
const db = async (sql, descriptor, serializedParameters = []) => {
    return new Promise(async (resolve, reject) => {
        try {
            const connection = await mysql.createConnection({
            //const connection = mysql.createPool({
                host: env.DB.HOST,
                user: env.DB.USER,
                password: env.DB.PASSWORD,
                database: env.DB.NAME,
                port: env.DB.PORT
            });
            if (connection && env.ENV === "development") {
                //console.log(/*"There is a connection to the db for: ", descriptor*/);
            }
            let result;
            if (serializedParameters.length > 0) {
                result = await connection.query(sql, serializedParameters);
            } else result = await connection.query(sql);
            connection.end();
            resolve(result);
        } catch (e) {
            console.log("ERROR pool.db: " + e);
            reject(e);
        }
    });
}
export default db;
And this is an example of what the sockets look like:
sockets.js file
socket.on('updateTimeEntry', async (time, notes, TimeEntryID, callback) => {
    try {
        const results = await updateTimeEntry(time, notes, TimeEntryID);
        callback(true);
        //socket.emit("refreshJobPage", false, "");
    } catch (error) {
        callback(false);
    }
});

socket.on('selectDatesFromTimeEntry', (afterDate, beforeDate, callback) => {
    const results = selectDatesFromTimeEntry(afterDate, beforeDate).then((results) => {
        //console.log('selectLastTimeEntry: ', results);
        callback(results);
    });
});
And this is an example of the methods that get called from the sockets to make a connection to the database
timeEntry.js file
import db from './database';
export const updateTimeEntry = (time, notes, TimeEntryID) => {
    return new Promise(async (resolve, reject) => {
        try {
            const updateTimeEntry = `UPDATE mytable SET PunchOut = NOW(), WorkTimeTotal = '${time}', Notes = "${notes}" WHERE TimeEntryID = '${TimeEntryID}';`;
            const response = await db(updateTimeEntry, "updateTimeEntry");
            resolve(response[0]);
        } catch (e) {
            console.log("ERROR TimeEntry.updateTimeEntry: " + e);
            reject(e);
        }
    });
};

//Gets a list for assigned jobs
export const selectDatesFromTimeEntry = (afterDate, beforeDate) => {
    return new Promise(async (resolve, reject) => {
        try {
            const selectDatesFromTimeEntry = `SELECT * FROM mytable.TimeEntry WHERE PunchIn >= '${afterDate}' && PunchIn < '${beforeDate}';`;
            //console.log("Call: " + selectDatesFromTimeEntry);
            const response = await db(selectDatesFromTimeEntry, "selectDatesFromTimeEntry");
            //console.log("Response: " + response);
            resolve(response);
        } catch (e) {
            console.log("ERROR TimeEntry.selectDatesFromTimeEntry: " + e);
            reject(e);
        }
    });
};
I just really want to figure out why I'm noticing so much overhead with my database connections, and what I can do to resolve it. I really don't want to have to keep restarting my server each time it crashes, so hopefully I can find some answers to this. If anyone has any suggestions or knows what I can change in my code to solve this issue that would help me out a lot, thanks!
EDIT 1
These are the errors I'm getting from mysql
2020-04-30T11:12:40.214381Z 766844 [Note] Aborted connection 766844 to db: 'mydb' user: 'xxx' host: 'XXXXXX' (Got timeout reading communication packets)
2020-04-30T11:12:48.155598Z 766845 [Note] Aborted connection 766845 to db: 'mydb' user: 'xxx' host: 'XXXXXX' (Got timeout reading communication packets)
2020-04-30T11:15:53.167160Z 766848 [Note] Aborted connection 766848 to db: 'mydb' user: 'xxx' host: 'XXXXXX' (Got timeout reading communication packets)
EDIT 2
Is there a way I can see why some of these connections would be hanging or going idle?
EDIT 3
I've been looking into using a pool instead, as it seems that it is a more scalable and appropriate solution for my application. How can I achieve this with the existing code that I have?
You are opening a new connection for each and every query... Opening a connection is slow, there is a lot of overhead in doing so, and your server certainly does not allow an unlimited number of connections. The Node.js mysql package provides a pooling mechanism which would be a lot more efficient for you.
The goal is to reuse the connections as much as possible instead of always disposing of them right after the first query.
In your db.js, create a pool on startup and use it:
var pool = mysql.createPool({
    connectionLimit: 10, // number of connections to create
    host: env.DB.HOST,
    user: env.DB.USER,
    password: env.DB.PASSWORD,
    database: env.DB.NAME,
    port: env.DB.PORT
});
To execute your query, you would simply do this:
pool = await pool; // with promise-mysql, createPool() resolves to the pool itself
return pool.query(sql, serializedParameters);
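Putting it together, here is a minimal sketch of what database.js could look like with a pool (this assumes promise-mysql v4+, where mysql.createPool() returns a Promise that resolves to the pool object):
import mysql from 'promise-mysql';
import env from '../../../env.config.json';

// Create the pool once at module load; every query reuses its connections.
const poolPromise = mysql.createPool({
    connectionLimit: 10,
    host: env.DB.HOST,
    user: env.DB.USER,
    password: env.DB.PASSWORD,
    database: env.DB.NAME,
    port: env.DB.PORT
});

const db = async (sql, descriptor, serializedParameters = []) => {
    const pool = await poolPromise; // resolves immediately after the first call
    return serializedParameters.length > 0
        ? pool.query(sql, serializedParameters)
        : pool.query(sql);
};

export default db;
Because pool.query() checks a connection out of the pool and releases it automatically, there is no connection.end() call to forget, and the number of open connections is capped by connectionLimit.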

how to check internet connectivity in node.js, electron

I'm trying to detect internet connectivity in Node.js and Electron.
My code reports connectivity every second, but what I want is to show a notification only when the connection state changes (connected/disconnected), not every second.
Can I do that in Node.js and Electron?
main.js
const dns = require('dns');
const notifier = require('node-notifier'); // assumed: notifier comes from the node-notifier package

function liveCheck() {
    dns.resolve('www.google.com', function(err, addr) {
        if (err) {
            notifier.notify({
                appName: "com.myapp.id",
                title: "network error",
                message: "disconnected",
                icon: "./facebook.png"
            });
        } else {
            console.log("connected");
        }
    });
}

setInterval(function() {
    liveCheck();
}, 1000);
navigator.onLine is not a reliable method, so I found an npm utility to handle this situation.
Install it: npm i check-internet-connected
And use it:
const checkInternetConnected = require('check-internet-connected');

const config = {
    timeout: 5000,      // timeout for each connection attempt (default 5000)
    retries: 3,         // number of retries before failing (default 5)
    domain: 'apple.com' // the domain to check the DNS record of
};

checkInternetConnected(config)
    .then(() => {
        console.log("Connection available");
    })
    .catch((err) => {
        console.log("No connection", err);
    });
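Since the question asks for a notification only when the state switches, you can keep a small flag next to the check; this is a sketch layered on top of the package above, not part of its API:
let wasOnline = null; // unknown at startup

setInterval(() => {
    checkInternetConnected(config)
        .then(() => {
            if (wasOnline !== true) console.log("Connection available");
            wasOnline = true;
        })
        .catch(() => {
            if (wasOnline !== false) console.log("No connection");
            wasOnline = false;
        });
}, 5000);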
This works for me in the latest Electron version 4.0.8 on macOS, from a renderer process, using only Web API:
function notifyUser(event) {
    let myNotification = new Notification(
        "com.myapp.id",
        { body: (event.type === 'online') ? "Internet available" : "No internet" }
    );
}

window.addEventListener('online', notifyUser, false);
window.addEventListener('offline', notifyUser, false);
References:
navigator.onLine
Online and offline events
Notification
If you were to keep the same logic, you need to add a flag to track whether you switched from no connection to connected. I did that with the isConnected flag:
const dns = require("dns");
const notifier = require("node-notifier"); // assumed: same notifier as in the question

let isConnected = false;

function liveCheck() {
    dns.resolve("www.google.com", function(err, addr) {
        if (err) {
            if (isConnected) {
                notifier.notify({
                    appName: "com.myapp.id",
                    title: "network error",
                    message: "disconnected",
                    icon: "./facebook.png",
                });
            }
            isConnected = false;
        } else {
            if (isConnected) {
                // connection is still up and running, do nothing
            } else {
                notifier.notify({
                    appName: "com.myapp.id",
                    title: "connection gained",
                    message: "connected",
                    icon: "./facebook.png",
                });
            }
            isConnected = true;
        }
    });
}

setInterval(function() {
    liveCheck();
}, 1000);
First, install the internet-available package:
npm install internet-available
And then:
var internetAvailable = require("internet-available");

// Set a timeout and a limit of attempts to check for connection
internetAvailable({
    timeout: 4000,
    retries: 10,
}).then(function() {
    console.log("Internet available");
}).catch(function() {
    console.log("No internet");
});

Socket.io - Know in which room socket is

I am writing a simple chat on socket.io with a Node.js server, so I need to know which room a socket is in. For example, a socket has connected and joined a room. When this socket sends a message, I want to broadcast it only to the sockets that are in that room (sck.broadcast.to(its_room).emit).
io.on("connection", function(sck) {
...
sck.join(availableRoom);
sck.on("message", function(msg) {
sck.broadcast.to(**its room**).emit(msg);
sck.emit(msg);
});
...
});
I do not need an array of sockets and information about them (in which room etc).
You first enter your username and the chat room you want to join. Once you submit the form, the username and room info are appended to the URL as a query string. In the browser, location.search is how we reach the query string, and we have to parse it.
Add this CDN script to your main.html:
<script src="https://cdnjs.cloudflare.com/ajax/libs/qs/6.6.0/qs.min.js"></script>
This is the CDN for the qs query-string npm package. Since it would be a headache to set up an npm package on the client side, the CDN is easier to handle; just make sure the CDN script is loaded before your other JS files in your HTML. Once it is loaded we can use:
const { username, room } = Qs.parse(location.search, { ignoreQueryPrefix: true })
The { ignoreQueryPrefix: true } option omits the leading "?" from the query string.
Once you have the username and room name, we can emit our event from the client side.
client.js
socket.emit('join', { username, room }, (error) => {
    if (error) {
        alert(error);
        location.href = '/'; // send the client back to the start page on error
    }
});
Now we have to listen for this event on the server side, but we also have to keep track of users.
index.js
io.on("connection", socket => {
socket.on("join", ({ username, room }, callback) => {
//we have to define addUser function
let users = [];
const addUser = ({ id, username, room }) => {
// Clean the data
username = username.trim().toLowerCase();
room = room.trim().toLowerCase();
// Validate the data
if (!username || !room) {
return {
error: "Username and room are required!"
};
}
// Check for existing user
const existingUser = users.find(user => {
return user.room === room && user.username === username;
});
// Validate username
if (existingUser) {
return {
error: "Username is in use!"
};
}
// Store user
const user = { id, username, room };
users.push(user);
// users=users.push(user) this will cause error
return { user };
};
//as you see this function will return either error or user object
//Socket generates an id upon connection.
//result is either error or user
const { error, user } = addUser({ id: socket.id, username, room });
if (error) {
return callback(error);
}
socket.join(user.room);//socket joined to this room
//now time to emit a new event
socket.emit("message", ( "Welcome!"));
socket.broadcast
.to(user.room)
.emit(
"message",
(`${user.username} has joined!`)
);
//broadcast will send messages to everyone in the chat room except the connected socket
callback(); //success scenario. let the client knows that it is allowed to connect
})
})
Try the following, but I am not sure whether it works in every situation:
Object.keys(sck.manager.roomClients[sck.id])[1]
I found this by logging the socket, then I looked for the rooms information and extracted it the way you can see above.
I am using socket.io version 0.9.6; there may be a different solution for the latest version.
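For comparison, on newer Socket.IO versions (3.x/4.x) socket.rooms is a Set that always contains the socket's own id plus any rooms it has joined, so a sketch of the same idea could look like this (availableRoom is taken from the question's code):
io.on("connection", (sck) => {
    sck.join(availableRoom);

    sck.on("message", (msg) => {
        // Pick the first room that is not the socket's own id.
        const room = [...sck.rooms].find((r) => r !== sck.id);
        if (room) {
            sck.broadcast.to(room).emit("message", msg); // everyone else in the room
            sck.emit("message", msg);                    // echo back to the sender
        }
    });
});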

Opentok switching between sessions

I am making an application where users can switch between different voice rooms (sessions).
The function connectToSession (see below) checks whether the user is already in a session; if so, it disconnects from the current session, then connects to the other session and publishes its stream there.
The switching of voice rooms works fine and to the user it all appears to work; however, every time a user switches session I get these errors in the console:
Publisher State Change Failed: 'Publishing' cannot transition to 'PublishingToSession'
Publisher State Change Failed: 'Publishing' cannot transition to 'Publishing'
From my debugging it seems to occur on the line where session.publish is called.
var session = null;
var publisher = null;
var subscribers = {};

function connectToSession(sessionId, token) {
    if (session) {
        session.disconnect();
    }

    if (!publisher) {
        // First time, so we need to initialise the publisher
        var pubOptions = {
            videoSource: null,
            name: user_id
        };
        publisher = OT.initPublisher(null, pubOptions, function() {
            // Publisher initialised
        });
        publisher.on({
            'streamDestroyed': function(event) {
                event.preventDefault();
            }
        });
    }

    session = OT.initSession(apiKey, sessionId);
    session.on({
        'streamCreated': function(event) {
            // Subscribe to the other stream
            subscribers[event.stream.name] = session.subscribe(event.stream);
        },
        'sessionConnected': function(sessionConnectEvent) {
            // Session connected
        },
        'streamDestroyed': function(event) {
            // Stream removed from session
            delete subscribers[event.stream.name];
        }
    });

    session.connect(token, function(error) {
        if (error) {
            console.error(error);
        } else {
            session.publish(publisher, function() {
                // Finished publishing
            });
        }
    });
}
Any ideas what is causing this error?
A workaround is to explicitly call unpublish for the publisher before disconnecting from the session:
if (session) {
    if (publisher) {
        session.unpublish(publisher);
    }
    session.disconnect();
}
