How to improve websocket 'onmessage' - javascript

I'm learning WebSockets and wanted to make a WebSocket onmessage logger that writes the received data to MongoDB.
I'm starting my script with
node listner.js
listner.js:
'use strict';
let DBAbstract = require('./db-controller');
const WebSocket = require('ws');

// getting an instance of a mongodb connection
let mongoInstance = new DBAbstract();
const ws = new WebSocket('ws://ws-url');

ws.onopen = function() {
  console.log('Open');
};
ws.onmessage = function(d) {
  console.log(d.data);
  mongoInstance.insertOne(JSON.parse(d.data)); // Promise which adds the data
};
ws.onclose = function() {
  console.log('Close');
};
ws.onerror = function(e) {
  console.log(e.code);
};
This is the script I have so far, and it works.
When an onmessage event fires, I receive a small JSON payload like this:
{
  "event": 2,
  "value": 12,
  "item": "Spoon"
}
I was just wondering whether this is enough in terms of the scalability of the received onmessage events.
I mean, there is no problem when I receive three of these small JSONs in 10 seconds.
But what will happen when I receive 100 small JSONs in 10 seconds?
Where is the limit on receiving onmessage events as a client?
Will my listner.js crash because it can't handle the amount of onmessage events? Or will my MongoDB crash because it can't handle the amount of database writes?
Can I improve this code?
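One idea I had is to decouple receiving from writing: queue incoming messages and flush them to MongoDB in batches. A rough sketch of that, assuming the DBAbstract wrapper would get an insertMany method (hypothetical; the code above only shows insertOne):
'use strict';
const WebSocket = require('ws');
let DBAbstract = require('./db-controller');

let mongoInstance = new DBAbstract();
const ws = new WebSocket('ws://ws-url');

const buffer = [];            // messages waiting to be written
const FLUSH_INTERVAL_MS = 1000;

ws.onmessage = function (d) {
  // no database I/O here: just queue the parsed message
  buffer.push(JSON.parse(d.data));
};

setInterval(function () {
  if (buffer.length === 0) return;
  const batch = buffer.splice(0, buffer.length); // take everything queued so far
  mongoInstance.insertMany(batch)                // one round trip instead of one per message
    .catch(err => console.error('insert failed', err));
}, FLUSH_INTERVAL_MS);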


How to access input/output of worklet node from main file?

I have created a real-time voice chat application for a game I am making. I got it to work completely fine using the audioContext.createScriptProcessor() method.
Here's the code; I left out the parts that weren't relevant:
//establish websocket connection
const audioData = []
//websocket connection.onMessage (data) =>
audioData.push(decodeBase64(data)) //push audio data coming from another player into array
//on get user media (stream) =>
const audioCtx = new AudioContext({latencyHint: "interactive", sampleRate: 22050,})
const inputNode = audioCtx.createMediaStreamSource(stream)
var processor = audioCtx.createScriptProcessor(2048, 1, 1);
var outputNode = audioCtx.destination
inputNode.connect(tunerNode)
processor.connect(outputNode)
processor.onaudioprocess = function (e) {
  var input = e.inputBuffer.getChannelData(0);
  webSocketSend(input) //send microphone input to other sockets via a function set up in a different file, all it does is base 64 encode then send.
  //if there is data from the server, play it, else, play nothing
  var output
  if (audioData.length > 0) {
    output = audioData[0]
    audioData.splice(0, 1)
  } else {
    output = new Array(2048).fill(0)
  }
};
The only issue is that the createScriptProcessor() method is deprecated. As recommended, I attempted to do this with an AudioWorkletNode instead. However, I quickly ran into a problem: I can't access the user's microphone input, or set the output, from the main file where the WebSocket connection is.
Here is my code for main.js:
document.getElementById('btn').onclick = () => {createVoiceChatSession()}
//establish websocket connection
const audioData = []
//webSocket connection.onMessage (data) =>
audioData.push(data) //how do I get this data to the worklet Node???
var voiceChatContext
function createVoiceChatSession() {
  voiceChatContext = new AudioContext()
  navigator.mediaDevices.getUserMedia({ audio: true }).then(async stream => {
    await voiceChatContext.audioWorklet.addModule('module.js')
    const microphone = voiceChatContext.createMediaStreamSource(stream)
    const processor = new AudioWorkletNode(voiceChatContext, 'processor')
    microphone.connect(processor).connect(voiceChatContext.destination)
  }).catch(err => console.log(err))
}
Here is my code for module.js:
class processor extends AudioWorkletProcessor {
  constructor() {
    super()
  }
  //copies the input to the output
  process(inputList, outputList) { // how do I get the input list data (the data from my microphone) to the main file so I can send it via websocket ???
    for (var i = 0; i < inputList[0][0].length; i++) {
      outputList[0][0][i] = inputList[0][0][i]
      outputList[0][1][i] = inputList[0][1][i]
    }
    return true;
  }
}
registerProcessor("processor", processor);
So I can record and process the input, but I can't send the input via WebSocket or pass data coming from the server into the worklet node, because I can't access the input list or output list from the main file where the WebSocket connection is. Does anyone know a way to work around this? Or is there a better solution that doesn't use audio worklet nodes?
Thank you to all who can help!
I figured it out: all I needed to do was use the worklet node's port (postMessage and the onmessage handler) to exchange data between the worklet and the main file.
processor.port.onmessage = (e) => { /* do something with e.data */ }
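For completeness, here's a rough sketch of how the two files can be wired together through the port. webSocketSend and the decoding of incoming server audio are carried over from my earlier snippets, and the chunks posted to the processor are assumed to be one 128-sample render quantum each:
// main.js: the node's port is the bridge to the processor
processor.port.onmessage = (e) => {
  webSocketSend(e.data) // mic samples posted by the processor, forwarded over the WebSocket
}
// when audio from another player arrives over the WebSocket:
// processor.port.postMessage(decodedSamples)

// module.js: the processor talks back through this.port
class Processor extends AudioWorkletProcessor {
  constructor() {
    super()
    this.incoming = [] // audio pushed from the main thread, queued for playback
    this.port.onmessage = (e) => this.incoming.push(e.data)
  }
  process(inputList, outputList) {
    const input = inputList[0][0]
    if (input) this.port.postMessage(input) // hand mic samples to the main thread
    const output = outputList[0][0]
    const chunk = this.incoming.shift() // assumed to be one 128-sample chunk
    for (let i = 0; i < output.length; i++) {
      output[i] = chunk ? chunk[i] : 0 // play queued audio, or silence
    }
    return true
  }
}
registerProcessor('processor', Processor)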

Socket.io reconnection firing no matter what I do

Using socket.io 1.4.5.
It seems no matter what I do, I cannot prevent socket.io from firing the reconnection event or destroying the client socket when there are interruptions with the internet connection.
On the client side, I have:
reconnectionDelay: 99999999,
timeout: 99999999999,
reconnection: false,
And yet, if the internet disconnects, the socket will become undefined (after 20 seconds or so) and the reconnection event fires when the internet goes back on.
My ultimate goal is to use the same exact socket on the server and on the client (regardless of how long it's been since they've communicated) unless that socket is explicitly disconnected on the server. I cannot have the socket reconnecting at will, because I store data on the socket and use the socket.id extensively in my application. IF the socket and socket id were to suddenly change, the application breaks.
I think you have to pass a unique variable in the handshake and key your data by that variable.
Regeneration of socket.id is normal behavior for socket.io.
In my own projects I make an initial request to the server to create a slot variable, store it in a slots collection in MongoDB, and then create the connection with that slot variable in the handshake.
Or, to simplify the answer to your question, just use https://www.npmjs.com/package/express-socket.io-session (a minimal usage sketch is shown below).
There is also a little "hack": use the namespaces logic: https://socket.io/docs/rooms-and-namespaces/#custom-namespaces
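For illustration, a minimal sketch of that package's documented middleware pattern (the session secret and server setup here are placeholders, not code from this question):
var express = require('express');
var app = express();
var server = require('http').Server(app);
var io = require('socket.io')(server);

var session = require('express-session')({
  secret: 'replace-me',
  resave: true,
  saveUninitialized: true
});
var sharedsession = require('express-socket.io-session');

app.use(session);               // Express uses the session middleware
io.use(sharedsession(session)); // Socket.IO shares the same session

io.on('connection', function (socket) {
  // the same session object Express sees; keep per-user data here instead of
  // on the socket, so it survives a reconnect and a new socket.id
  var userSession = socket.handshake.session;
});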
BONUS:
Here is a quick solution.
Client side:
function genUUID() {
  var d = new Date().getTime();
  var uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
    var r = (d + Math.random() * 16) % 16 | 0;
    d = Math.floor(d / 16);
    return (c == 'x' ? r : (r & 0x3 | 0x8)).toString(16);
  });
  return uuid;
};

var namespace = localStorage.getItem("namespace");
if (!namespace) {
  namespace = genUUID();
  localStorage.setItem("namespace", namespace);
}

var connect = function (ns) {
  return io.connect(ns, {
    query: 'ns=' + ns // this is handshake variable `ns`
  });
}

var socket = connect('/' + namespace);
Backend:
const
  url = require('url'),
  sharedData = {};

io.sockets.on('connection', (socket) => {
  const handshake = url.parse(socket.handshake.url, true);
  const ns = handshake.query ? handshake.query.ns : 'default';
  console.log('GOT CONNECTION TO NS: ' + ns);
  io.of(ns).on('connection', (socket) => {
    if (!sharedData[ns]) sharedData[ns] = {};
    // put socket code here
    socket.on('some-event', (data) => {
      sharedData[ns]['some-event'] = data.someData;
    });
  });
});

RabbitMQ for NodeJS with Express routing

My server is running Node.js and uses the amqplib API to request data from another application. The Node.js server is receiving the information successfully, but there's a noticeable delay, and I'm trying to determine whether I am doing this in the most efficient manner. Specifically, I'm concerned with the way that I open and close connections.
Project Layout
I have two controller files that handle receiving and requesting the data, request.img.server.controller.js and receive.img.server.controller.js. Finally, the routes file, oct.server.routes.js, calls the controller methods when a button on the front end is pushed.
request.img.server.controller.js
'use strict';
var amqp = require('amqplib/callback_api');

var connReady = false;
var conn, ch;

amqp.connect('amqp://localhost:5672', function(err, connection) {
  conn = connection;
  connReady = true;
  conn.createChannel(function(err, channel) {
    ch = channel;
  });
});

exports.sendRequest = function(message) {
  console.log('sending request');
  if (connReady) {
    var ex = '';
    var key = 'utils';
    ch.publish(ex, key, new Buffer(message));
    console.log(" [x] Sent %s: '%s'", key, message);
  }
};
receive.img.server.controller.js
var amqp = require('amqplib/callback_api');
var fs = require('fs');

var wstream = fs.createWriteStream('C:\\Users\\yako\\desktop\\binarytest.txt');
var image, rows, cols;

exports.getResponse = function(resCallback) {
  amqp.connect('amqp://localhost:5672', function(err, conn) {
    conn.createChannel(function(err, ch) {
      var ex = '';
      ch.assertQueue('server', {}, function(err, q) {
        console.log('waiting for images');
        var d = new Date();
        var n = d.getTime();
        ch.consume(q.queue, function(msg) {
          console.log(" [x] %s: '%s'", msg.fields.routingKey, msg.content.toJSON());
          rows = msg.content.readInt16LE(0);
          cols = msg.content.readInt16LE(2);
          console.log("rows = %s", msg.content.readInt16LE(0));
          console.log("cols = %s", msg.content.readInt16LE(2));
          image = msg.content;
          var currMax = 0;
          for (var i = 4; i < image.length; i += 2) {
            if (image.readInt16LE(i) > currMax) {
              currMax = image.readInt16LE(i);
            }
            wstream.write(image.readInt16LE(i) + ',');
          }
          console.log('done writing max is', currMax);
          //console.log(image);
          resCallback(rows, cols, image);
        }, {
          noAck: true
        });
      });
    });
  });
};
oct.server.routes.js
'use strict';

module.exports = function(app) {
  var request_img = require('../../app/controllers/image-tools/request.img.server.controller.js');
  var receive_img = require('../../app/controllers/image-tools/receive.img.server.controller.js');

  // oct routes
  app.get('/load_slice', function(req, res) {
    console.log('load slice hit');
    receive_img.getResponse(function (rows, cols, image) {
      res.end(image);
    });
    request_img.sendRequest('123:C:\\Users\\yako\\Documents\\Developer\\medicaldiag\\test_files\\RUS-01-035-09M-21.oct');
  });
};
The way you're opening connections is bad, and is at least part of the performance problem.
Connections are expensive to open. They open a new TCP/IP connection on a TCP/IP port between the client and rabbitmq server. This takes time, and uses up a limited resource on both the client and server.
Because of this, a single connection to RabbitMQ should be created and used within each of your node.js processes. This one connection should be shared by all of the code in that process.
Whenever you need to do something with RabbitMQ, open a new channel on the shared connection and do your work. Channels are cheap and are meant to be opened and closed as needed, within a connection.
More specifically in your code, the receive.img.server.controller.js file is the major problem. This opens a new connection to RabbitMQ every time you call the getResponse method.
If you have 10 users hitting the site, you'll have 10 open RabbitMQ connections when 1 would be sufficient. If you have thousands of users hitting the site, you'll have thousands of open RabbitMQ connections when 1 would be sufficient. You also run the risk of exhausting your available TCP/IP connections on the RabbitMQ server or client.
Your receive.img.server.controller.js should look more like your request.img.server.controller.js - one connection open, and re-used all the time.
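A minimal sketch of what that could look like, keeping the queue name and callback shape from the code above (the image parsing and error handling are trimmed; this is an illustration, not a drop-in replacement):
'use strict';
var amqp = require('amqplib/callback_api');

// one connection per process, opened once and shared by every request
var conn = null;
amqp.connect('amqp://localhost:5672', function (err, connection) {
  if (err) { return console.error('amqp connect failed', err); }
  conn = connection;
});

exports.getResponse = function (resCallback) {
  if (!conn) { return console.error('amqp connection not ready yet'); }
  // channels are cheap: open one on the shared connection, use it, close it
  conn.createChannel(function (err, ch) {
    ch.assertQueue('server', {}, function (err, q) {
      ch.consume(q.queue, function (msg) {
        var rows = msg.content.readInt16LE(0);
        var cols = msg.content.readInt16LE(2);
        resCallback(rows, cols, msg.content);
        ch.close(function () {}); // done with this request's channel
      }, { noAck: true });
    });
  });
};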
Also, FWIW - I recommend using the wascally library for RabbitMQ w/ node.js. This library sits on top of amqplib, but makes things significantly easier. It will manage your one connection for you, and make it easier for you to send and receive messages.
I also have some training material available for RabbitMQ and node.js that covers the basics of amqplib and then moves in to using wascally for real application development.

Node.js: Receiving too many UDP messages at a time, losing them

My node server receives about 400 UDP messages in one second, and it all works, and I am able to process all 400 of them.
However, when I start to receive about 700 UDP messages in one second, I lose 2-20 of the messages, and they never get parsed :(
I have thought about some options here:
1. Create a queue of all the socket messages, then consume them one by one, although I can't figure out how to implement this.
2. Find a setting in Node / Express / the dgram socket where I can increase the memory size / buffer size, something like that. I couldn't find any settings like this, though :(
3. Use a different UDP receiver and stop using Node's built-in UDP socket. I didn't find any other receivers.
Here's what my UDP receiver looks like:
var dgram = require("dgram");
var udpserver = dgram.createSocket("udp4");
var seatStateStore = require("./SeatStateStore");

udpserver.on("message",
  function (msg, rinfo) {
    seatStateStore.parseMessage(msg.toString());
  });
Anyone have any ideas? I couldn't figure out any of the 3 options :/ Can someone help me out?
Node v0.10.29
Express v3.14.0
===============================
UPDATE / SOLUTION
Here's the code I ended up using (a slightly modified version of @RoyHB's solution):
var dgram = require("dgram");
var udpserver = dgram.createSocket("udp4");
var seatStateStore = require("./SeatStateStore");
var Dequeue = require('dequeue');
var FIFO = new Dequeue();

fetcher();

udpserver.on("message",
  function (msg, rinfo) {
    FIFO.push(msg.toString());
  });
udpserver.bind(43278);

function fetcher () {
  while (FIFO.length > 0) {
    var msg = FIFO.shift();
    seatStateStore.parseMessage(msg);
  }
  setImmediate(fetcher); //make this function continuously run
}
I know there is already an answer to this, but as of today I found a way to increase the buffer on a dgram socket, documented in the official Node.js documentation:
socket.setRecvBufferSize(size)
Added in: v8.7.0
size <integer>
Sets the SO_RCVBUF socket option. Sets the maximum socket receive buffer in bytes.
socket.setSendBufferSize(size)
Added in: v8.7.0
size <integer>
Sets the SO_SNDBUF socket option. Sets the maximum socket send buffer in bytes.
Usage example:
var socket = dgram.createSocket('udp4');
socket.on("listening", () => {
  socket.setRecvBufferSize(100000000); // 100mb
  socket.setSendBufferSize(100000000); // 100mb
});
The default value is 65507
There is an NPM module called dequeue. I use it a lot in situations similar to yours.
Basically:
your program pushes received messages onto the end of the queue.
an interval timer periodically activates another method or function (a queue-fetcher) which checks whether there are messages on the queue and, if so, fetches one or more and processes them.
Alternatively (and maybe preferably), instead of a timer you can use node's process.nextTick method to continuously check the queue for messages.
Ideally, seatStateStore.parseMessage would create a new object to asynchronously process one message, so that parseMessage returns without delay while the actual message processing continues in the background (see the bottom of the example code).
I haven't tested the code below; it's meant to illustrate, not to run.
var Dequeue = require('dequeue');
var FIFO = new Dequeue();
var seatStateStore = require("./SeatStateStore");
var dgram = require("dgram");

setInterval(fetcher, 1);

var udpserver = dgram.createSocket("udp4");
udpserver.on("message",
  function (msg, rinfo) {
    FIFO.push(msg);
  }
);

function fetcher () {
  while (FIFO.length > 0) {
    var msg = FIFO.shift();
    seatStateStore.parseMessage(msg);
  }
}
** OR (maybe better) **
var Dequeue = require('dequeue');
var FIFO = new Dequeue();
var seatStateStore = require("./SeatStateStore");
var dgram = require("dgram");

fetcher();

var udpserver = dgram.createSocket("udp4");
udpserver.on("message",
  function (msg, rinfo) {
    FIFO.push(msg);
  }
);

function fetcher () {
  while (FIFO.length > 0) {
    var msg = FIFO.shift();
    seatStateStore.parseMessage(msg);
  }
  process.nextTick(fetcher); // keep checking the queue on the next tick
}
Outline of seatStateStore.parseMessage:
seatStateStore.parseMessage = function (msg) {
  var proc = new asyncProcHandler(msg, function (err) {
    if (err) {
      //handle the error
    }
  });
};
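Note that asyncProcHandler above is hypothetical. One concrete way to get the same effect is to defer the heavy work with setImmediate so that parseMessage returns right away (doHeavyParsing is a placeholder for the real parsing/storage logic):
seatStateStore.parseMessage = function (msg) {
  // return immediately; do the real work on a later turn of the event loop
  setImmediate(function () {
    try {
      doHeavyParsing(msg); // hypothetical: the actual parsing/storage work
    } catch (err) {
      console.error('failed to process message', err);
    }
  });
};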

really strange behaviour on node.js using connect-form, socket.io and express

The following code:
req.form.on('progress', function(bytesReceived, bytesExpected) {
  var percent = (bytesReceived / bytesExpected * 100) | 0;
  // progressEvent.download(percent);
  io.sockets.on('connection', function (socket) {
    socket.emit('progress', { percent: percent });
    client = socket;
  });
});
written in an HTTP POST handler (express.js), sends socket messages to the client-side JS, but it obviously creates a huge number of listeners; in fact it warns me, saying:
"(node) warning: possible EventEmitter memory leak detected. 11 listeners added. Use emitter.setMaxListeners() to increase limit."
On the other hand, this code:
io.sockets.on('connection', function (socket) {
  progressEvent.on('progress', function(percentage) {
    console.log(percentage);
    socket.emit('progress', { percent: percentage });
  });
});
doesn't send any message back to the client. The ProgressEvent is:
var util = require('util'),
    events = require('events');

function ProgressEvent() {
  if (false === (this instanceof ProgressEvent)) {
    return new ProgressEvent();
  }
  events.EventEmitter.call(this);
}
util.inherits(ProgressEvent, events.EventEmitter);

ProgressEvent.prototype.download = function(percentage) {
  var self = this;
  self.emit('progress', percentage);
}

exports.ProgressEvent = ProgressEvent;
I've spent a good day on this strange problem, and I can't really see why socket.io doesn't send the socket message to the client.
the whole project is here: https://github.com/aterreno/superuploader
Thanks for your attention & help
You shouldn't listen for socket.io connections inside the progress event. It looks like you're trying to get socket.io to connect when a user uploads a file, but that is not what this code does. Instead, it listens for a new connection each time the progress event fires on the upload, which I'm guessing is pretty often, and that's why you're getting the warning about too many listeners.
What you want to do instead is, on the client side, tell the server through socket.io when you initialize an upload. Then the server links up that socket.io client with their upload through their session: http://www.danielbaulig.de/socket-ioexpress/
Something like this should do it
io.sockets.on('connection', function(socket) {
  var session = socket.handshake.session;
  socket.on('initUpload', function() {
    session.socket = socket;
  });
  socket.on('disconnect', function() {
    session.socket = null;
  });
});
And then in your route
req.form.on('progress', function(bytesReceived, bytesExpected) {
  var percent = (bytesReceived / bytesExpected * 100) | 0;
  if (req.session.socket) {
    req.session.socket.emit('progress', percent);
  }
});
This only works with one upload per session, but you get the idea.
