Websocket timeout - javascript

I need to handle a websocket timeout when the socket is trying to connect to a server that is available but busy.
Going deeper: I have a web application that can connect to several single-threaded remote socket servers in charge of post-processing data passed as input.
The goal is to collect data from the web GUI and then submit it to the first socket that is available and listening.
If a server socket daemon is already processing data, it cannot serve the user request, which instead has to be addressed to the second socket in the list.
Basically, and with a practical example, I have the following sockets ready to accept a call coming from a web browser:
ws://10.20.30.40:8080/
ws://10.20.30.40:8081/
ws://10.20.30.40:8082/
ws://10.20.30.40:8083/
ws://192.192.192.192:9001/
When the first socket receives the call and performs the handshake, it starts to process the data (and this may take some minutes).
When another client makes the same request on a different set of data, I would like that request to be served by the second available socket, and so on.
So the question is... how can I contact the first socket in the list, wait 250 milliseconds, and (in case of timeout) skip to the next one?
I've started from the following approach:
$.interval = function(func, wait, times) {
    var interv = function(w, t) {
        return function() {
            if (typeof t === "undefined" || t-- > 0) {
                setTimeout(interv, w);
                try {
                    func.call(null);
                } catch(e) {
                    t = 0;
                    throw e.toString();
                }
            } else {
                alert('stop');
            }
        };
    }(wait, times);
    setTimeout(interv, wait);
};

handler = new WebSocket(url);
loop = 1;
$.interval(function() {
    rs = handler.readyState;
    console.log("(" + loop++ + ") readyState: " + rs);
}, 25, 10); // loop 10 times every 25 milliseconds
But how can I stop when socket.readyState becomes 1 (OPEN), or when the limit is reached?
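One way to do both (a minimal sketch, not from the original post; names are illustrative): keep the handle returned by setInterval and call clearInterval() as soon as the socket opens or the attempt budget runs out, then fall through to the next server in the list:

function tryConnect(urls, index) {
    if (index >= urls.length) {
        console.log("No socket available");
        return;
    }
    var handler = new WebSocket(urls[index]);
    var attempts = 10;
    var timer = setInterval(function() {
        if (handler.readyState === WebSocket.OPEN) {
            clearInterval(timer); // stop polling: this socket accepted us
            console.log("Connected to " + urls[index]);
        } else if (--attempts === 0) {
            clearInterval(timer); // 10 * 25 ms = 250 ms budget exhausted
            handler.close();      // abort the pending connection attempt
            tryConnect(urls, index + 1); // skip to the next server
        }
    }, 25);
}

tryConnect(["ws://10.20.30.40:8080/", "ws://10.20.30.40:8081/"], 0);

Note this assumes a busy daemon does not complete the handshake within the budget; if it accepts the connection anyway, you would need an application-level busy/free reply instead.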

Related

Cloudflare websocket returns only one value of Date.now()

OK, so I made a websocket in Cloudflare Workers which stores a variable connecttime with Date.now() when the client connects, and takes another timestamp when it disconnects. It then calculates the difference and logs "client disconnected after a connection of --ms", where -- is the difference between the two times.
addEventListener('fetch', (event) => {
    event.respondWith(handleRequest(event.request))
})

async function handleRequest(request) {
    const connecttime = Date.now();
    const upgradeHeader = request.headers.get('Upgrade');
    if (!upgradeHeader || upgradeHeader !== 'websocket') {
        return new Response('Expected Upgrade: websocket', { status: 426 });
    }
    const webSocketPair = new WebSocketPair();
    const [client, server] = Object.values(webSocketPair);
    server.accept();
    //Do something
    server.addEventListener('close', event => {
        const endtime = Date.now();
        console.log(endtime);
        const contime = endtime - connecttime;
        console.log("client disconnected after a connection of " + contime + "ms");
    })
    // complete the upgrade by handing the client end back to the runtime
    return new Response(null, { status: 101, webSocket: client });
}
The problem is that it always logs a value of 0:
client disconnected after a connection of 0ms
This is by design and part of the security model of Cloudflare Workers - from the documentation:
Workers is designed to make it impossible for code to measure its own
execution time locally. For example, the value returned by Date.now()
is locked in place while code is executing. No other timers are
provided.
For a full explanation, I recommend the following documentation page.
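As a quick illustration of what that means in practice (a sketch, not from the original answer):

// Inside a single Workers execution, Date.now() never advances,
// no matter how much CPU time the code burns:
const t0 = Date.now();
for (let i = 0; i < 1e7; i++) {} // busy work
const t1 = Date.now();
console.log(t1 - t0); // always 0 -- the clock is locked while code runs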

javascript asynchronous task handling

I am new to Node development.
I am having trouble understanding the asynchronous nature of JS and Node. I am building a microservices backend web server, running Express in the gateway service, which translates REST requests into a series of messages published over RabbitMQ using the asynchronous messaging (amqplib) module; other services can then subscribe, process the request, and respond.
My service which processes the asynchronous request from the gateway looks like this:
amqp.connect('amqp://172.17.0.2', function(err, conn) {
    console.log("connection created");
    conn.createChannel(function(err, ch) {
        console.log("channel created");
        var exchange = 'main';
        ch.assertExchange(exchange, 'topic', {durable: true});
        ch.assertQueue('', {exclusive: true}, function(err, q) {
            console.log(' [*] Waiting for logs. To exit press CTRL+C');
            ch.bindQueue(q.queue, exchange, "VENUE_TOPIC.PENDING_STATUS.*.*");
            ch.consume(q.queue, function(msg) {
                console.log(" [x] %s:'%s'", msg.fields.routingKey, msg.content.toString());
                var pending_event = JSON.parse(msg.content.toString());
                console.log(pending_event.payload.id == 2);
                console.log(pending_event.payload.id);
                if (pending_event.payload.id == 1) {
                    var venue = getVenueByID(pending_event.payload.id);
                    const approved_event = new Event("VENUE_TOPIC", "APPROVED_STATUS", false, "READ_METHOD", {"venue": venue});
                    var key = approved_event.getMetaAsTopics();
                    var msg = Buffer.from(JSON.stringify(approved_event.getEventAsJSON()));
                    ch.assertExchange(exchange, 'topic', {durable: true});
                    ch.publish(exchange, key, msg, {persistent: true});
                    console.log(" [x] Sent '%s'", msg);
                } else if (pending_event.payload.id == 2) {
                    sleep(10000); // this function checks the OS's clock to count time in ms
                    var venue = getVenueByID(pending_event.payload.id);
                    const approved_event = new Event("VENUE_TOPIC", "APPROVED_STATUS", false, "READ_METHOD", {"venue": venue});
                    var key = approved_event.getMetaAsTopics();
                    var msg = Buffer.from(JSON.stringify(approved_event.getEventAsJSON()));
                    ch.assertExchange(exchange, 'topic', {durable: true});
                    ch.publish(exchange, key, msg, {persistent: true});
                    console.log(" [x] Sent '%s'", msg);
                }
            }, {noAck: true});
        });
    });
});
Let's say I have 2 requests, one which takes a long time to complete and another which is shorter. The longer request comes in before the shorter one.
In my example the long process is ID === 2 and the short process is ID === 1.
Now, if I send a request where ID is 2 and then immediately send a request where ID is 1, I have to wait 10 seconds for the first to complete before the other completes.
I cannot work out whether it is possible to process both requests concurrently, without having the long process block the short process until it completes.
I cannot comment since I don't have enough SO reputation, but I'll try to provide as much insight as possible.
Assuming sleep is this npm package, this is expected, so-called blocking behavior. If you're planning to make non-blocking calls (like web requests) inside your consume function, you will be fine, and Node will be able to consume further messages while it is processing. However, if you need to do blocking/CPU-heavy lifting and still be able to process incoming messages, you will need to offload this work to some worker.
Try this code instead, it will "sleep" without blocking:
setTimeout(() => {
    var venue = getVenueByID(pending_event.payload.id);
    const approved_event = new Event("VENUE_TOPIC", "APPROVED_STATUS", false, "READ_METHOD", {"venue": venue});
    var key = approved_event.getMetaAsTopics();
    var msg = Buffer.from(JSON.stringify(approved_event.getEventAsJSON()));
    ch.assertExchange(exchange, 'topic', {durable: true});
    ch.publish(exchange, key, msg, {persistent: true});
    console.log(" [x] Sent '%s'", msg);
}, 10000);
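If the heavy branch is genuinely CPU-bound rather than a simulated delay, one option (a sketch under assumptions, not part of the original answer; heavy-job.js is a hypothetical worker script) is to offload it with Node's worker_threads module, so the consumer stays free to pick up further messages:

const { Worker } = require('worker_threads');

function runHeavyJob(payload) {
    return new Promise((resolve, reject) => {
        // heavy-job.js would do the CPU-bound work on its own thread
        // and call parentPort.postMessage(result) when finished.
        const worker = new Worker(__dirname + '/heavy-job.js', { workerData: payload });
        worker.on('message', resolve);
        worker.on('error', reject);
    });
}

// Inside ch.consume, something like:
// runHeavyJob(pending_event.payload).then(function(result) { /* publish */ });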

Why is there a huge first-time message delay for child processes?

So I have been doing some micro-optimization in Node.js. I'm noticing that when working with child processes, there is a huge first-message delay. Take the following code for example:
index.js
var cp = require('child_process');
var child = cp.fork(__dirname + '/worker.js');
var COUNT = 10;
var start;

child.on('message', (e) => {
    var end = Date.now();
    console.log('Response time', end - start);
    if (COUNT--) {
        sendMessage();
    } else {
        process.exit();
    }
});

function sendMessage() {
    start = Date.now();
    child.send('hi!');
}

sendMessage();
worker.js
process.on('message', e => {
    process.send('Good morning!');
});
Explanation: all I'm doing is creating a child process and sending it messages, to which it immediately responds. I'm measuring the time between sending a message and receiving the response. The following output occurs nearly every time:
Response time 51
Response time 0
Response time 0
Response time 0
Resp...
There is a huge delay between the first message/response pair, and I have noticed this in other projects using child processes as well. Why is this occurring? How can I fix it?
Edit: after some debugging, the delay seems to occur between the first child.send and the first process.on callback firing in the child. The child's response itself arrives with almost no delay.
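One way to confirm that (a sketch, not from the original post): have the worker announce readiness and only start the benchmark afterwards, so the first sample no longer includes fork and module-loading time.

// index.js (sketch): wait for a 'ready' signal before starting to measure;
// worker.js would call process.send('ready') at the top level.
child.on('message', (e) => {
    if (e === 'ready') {
        sendMessage(); // startup is done; begin the benchmark
        return;
    }
    var end = Date.now();
    console.log('Response time', end - start);
    if (COUNT--) sendMessage();
    else process.exit();
});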
Related: https://github.com/nodejs/node/issues/3145

jQuery/AJAX set timeout when rate limit of 3rd party API is reached

In my app I make several nested AJAX calls to the LiquidPlanner API, which limits requests to 30 every 15 seconds. When I hit the limit, I want to set a timeout of sorts to stop sending requests to the API until the 15 seconds have elapsed. This will (at the moment) only ever be used by one person, so multiple clients are not a concern.
Upon hitting the rate limit the response is:
{
    "type": "Error",
    "error": "Throttled",
    "message": "34 requests, exceeds limit of 30 in 15 seconds. Try again in 7 seconds, or contact support#liquidplanner.com"
}
Here is some code, simplified for brevity:
$.getJSON('/dashboard/tasks/123', function(tasks) {
    $.each(tasks, function(t, task) {
        $.getJSON('/dashboard/project/987', function(project) {
            $.getJSON('/dashboard/checklist-items/382983', function(checklist_items) {
                // form some html here
            });
        });
    });
});
So at any point in this process I could hit the limit and need to wait until the timeout has completed.
I am also open to suggestions to better form the requests instead of nesting them.
Another solution, which probably prevents hammering better, is a queue - however, you need to be aware that the order of requests could be significantly different using this method, and that only one request will ever run at a time (so total response time may increase significantly depending on the use case).
//Keep track of queue
var queue = [];
//Keep track of last failed request
var failed_request = false;

function do_request(url, callback) {
    //Just add to queue
    queue.push({
        url: url,
        callback: callback
    });
    //If the queue was empty send it off
    if (queue.length === 1) attempt_fetch();
}

function attempt_fetch() {
    //If nothing to do just return
    if (queue.length === 0 && failed_request === false) return;
    //Get the url and callback from the failed request if any,
    var parms;
    if (failed_request !== false) {
        parms = failed_request;
    } else {
        //otherwise first queue element
        parms = queue.shift();
    }
    //Do request
    $.getJSON(parms.url, function(response) {
        //Detect throttling (note: capitalization matches the API response above)
        if (response.type === 'Error' && response.error === 'Throttled') {
            //Store the request
            failed_request = parms;
            //Call self in 15 seconds
            setTimeout(function() {
                attempt_fetch();
            }, 15000);
        } else {
            //Request went fine, let the next call pick from the queue
            failed_request = false;
            //Do your stuff
            parms.callback(response);
            //And send the next request
            attempt_fetch();
        }
    });
}
...your logic still remains largely unchanged:
do_request('/dashboard/tasks/123', function(tasks) {
    $.each(tasks, function(t, task) {
        do_request('/dashboard/project/987', function(project) {
            do_request('/dashboard/checklist-items/382983', function(checklist_items) {
                // form some html here
            });
        });
    });
});
Disclaimer: Still completely untested.
As far as design patterns for chaining multiple requests, take a look at the chaining section in the following article: http://davidwalsh.name/write-javascript-promises. Basically, you could create a service that exposes a method for each type of request, each returning a promise object, and then chain them together as needed.
As far as your question about setting a timeout, given the information you provided it is a bit difficult to advise you, but if that is absolutely all we have, I would create a request queue (a simple array that allows you to push new requests at the end and pop them from the head). I would then execute the known requests in order and inspect the response. If the response was a timeout error, I would set a timeout flag that the request executor honors; if successful, either queue additional requests or create the html output. This is probably a pretty bad design, but it is all I can offer given the information you provided.
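A rough sketch of that service idea (names are illustrative, not from the linked article; note that jQuery's $.getJSON already returns a promise-like jqXHR object that can be chained):

// Hypothetical promise-returning wrappers, one per request type
function getTasks(id)   { return $.getJSON('/dashboard/tasks/' + id); }
function getProject(id) { return $.getJSON('/dashboard/project/' + id); }

getTasks(123)
    .then(function(tasks) { return getProject(987); })
    .then(function(project) {
        // form some html here
    });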
Write a wrapper that will detect the rate-limited response:
//Keep track of state
var is_throttled = false;

function my_wrapper(url, callback) {
    //No need to try right now if already throttled
    if (is_throttled) {
        //Just call self in 15 seconds time
        setTimeout(function() {
            return my_wrapper(url, callback);
        }, 15000);
        return;
    }
    //Get your stuff
    $.getJSON(url, function(response) {
        //Detect throttling (capitalization matches the API response above)
        if (response.type === 'Error' && response.error === 'Throttled') {
            /**
             * Let "others" know that we are throttled - the each-loop
             * (probably) makes this necessary, as it may send off
             * multiple requests at once... If there's more than a couple
             * you will probably need to find a way to also delay those,
             * otherwise you'll be hammering the server before realizing
             * that you are being limited
             */
            is_throttled = true;
            //Call self in 15 seconds
            setTimeout(function() {
                //Throttling is (hopefully) over now
                is_throttled = false;
                return my_wrapper(url, callback);
            }, 15000);
        } else {
            //If not throttled, just call the callback with the data we have
            callback(response);
        }
    });
}
Then you should be able to rewrite your logic to:
my_wrapper('/dashboard/tasks/123', function(tasks) {
    $.each(tasks, function(t, task) {
        my_wrapper('/dashboard/project/987', function(project) {
            my_wrapper('/dashboard/checklist-items/382983', function(checklist_items) {
                // form some html here
            });
        });
    });
});
Disclaimer: Totally untested - my main concern is the scope of the url and callback... But it's probably easier for you to test.

How to kill node.js server connection when client browser exits?

I'm trying to write a simple node.js server with Server Sent Event abilities without socket.io. My code runs well enough, but I ran into a huge problem when I was testing it.
If I connect to the node.js server from my browser, it will receive the server events fine. But when I refresh the connection, the same browser will start to receive the event twice. If I refresh n times, the browser will receive data n+1 times.
The most I tried this out with was 16 times (16 refreshes) before I stopped trying to count it.
See the screenshot below of the console from my browser. After the refresh (it tries to make an AJAX call), it will output "SSE persistent connection established!" and afterwards it will start receiving events.
Note the time when the events arrive. I get the event twice at 19:34:07 (it logs to the console twice -- upon receipt of the event and upon writing of the data to the screen; so you can see four logs there).
I also get the event twice at 19:34:12.
Here's what it looks like on the server side after the client has closed the connection (with source.close()):
As you can see, it is still trying to send messages to the client! Also, it's trying to send the messages twice (so I know this is a server side problem)!
I know it tries to send twice because it sent two heartbeats when it's only supposed to send once per 30 seconds.
This problem magnifies when I open n tabs. What happens is each open tab will receive n*n events. Basically, how I interpret it is this:
Opening the first tab, I subscribe to the server once. Opening the second tab, I subscribe both open tabs to the server once again -- so that's 2 subscriptions per tab. Opening a third tab, I subscribe all three open tabs to the events once more, so 3 subscriptions per tab, total of 9. And so on...
I can't verify this, but my guess is that if I can create one subscription, I should be able to unsubscribe if certain conditions are met (i.e., the heartbeat fails and I must disconnect). And the reason this is happening is only because of the following:
I started setInterval once, and an instance of it will forever run unless I stop it, or
The server is still trying to send data to the client, trying to keep the connection open?
As for 1., I've already tried to kill the setInterval with clearInterval; it doesn't work. As for 2., while it's probably impossible, I'm leaning towards believing that...
Here's server side code snippets of just the relevant parts (editing the code after the suggestions from answers):
server = http.createServer(function(req, res) {
    heartbeatPulse(res);
}).listen(8888, '127.0.0.1', 511);

server.on("request", function(req, res) {
    console.log("Request Received!");
    var route_origin = url.parse(req.url);
    if (route_origin.query !== null) {
        serveAJAX(res);
    } else {
        serveSSE(res);
        //hbTimerID = timerID.hbID;
        //msgTimerID = timerID.msgID;
    }
    req.on("close", function() {
        console.log("close the connection!");
        res.end();
        clearInterval(res['hbID']);
        clearInterval(res['msgID']);
        console.log(res['hbID']);
        console.log(res['msgID']);
    });
});
var attachListeners = function(res) {
    /*
      Adding listeners to the server
      These events are triggered only when the server communicates by SSE
    */
    // this listener listens for file changes -- needs more modification to accommodate which file changed, what to write
    server.addListener("file_changed", function(res) {
        // replace this with a file reader function
        writeUpdate = res.write("data: {\"date\": \"" + Date() + "\", \"test\": \"nowsssss\", \"i\": " + i++ + "}\n\n");
        if (writeUpdate) {
            console.log("Sent SSE!");
        } else {
            console.log("SSE failed to send!");
        }
    });
    // this listener enables the server to send heartbeats
    server.addListener("hb", function(res) {
        if (res.write("event: hb\ndata:\nretry: 5000\n\n")) {
            console.log("Sent HB!");
        } else {
            // this fails. We can't just close the response, we need to close the connection
            // how to close a connection upon the client closing the browser??
            console.log("HB failed! Closing connection to client...");
            res.end();
            //console.log(http.IncomingMessage.connection);
            //http.IncomingMessage.complete = true;
            clearInterval(res['hbID']);
            clearInterval(res['msgID']);
            console.log(res['hbID']);
            console.log(res['msgID']);
            console.log("\tConnection Closed.");
        }
    });
}
var heartbeatPulse = function(res) {
    res['hbID'] = "";
    res['msgID'] = "";
    res['hbID'] = setInterval(function() {
        server.emit("hb", res);
    }, HB_period);
    res['msgID'] = setInterval(function() {
        server.emit("file_changed", res);
    }, 5000);
    console.log(res['hbID']);
    console.log(res['msgID']);
    /*return {
        hbID: hbID,
        msgID: msgID
    };*/
}

var serveSSE = function(res) {
    res.writeHead(200, {
        "Content-Type": "text/event-stream",
        "Cache-Control": "no-cache",
        "Access-Control-Allow-Origin": "*",
        "Connection": "keep-alive"
    });
    console.log("Establishing Persistent Connection...");
    if (res.write("event: connector\ndata:\nretry: 5000\n\n")) {
        // Only upon receiving the first message will the headers be sent
        console.log("Established Persistent Connection!");
    }
    attachListeners(res);
    console.log("\tRequested via SSE!");
}
This is largely a personal learning project, so any comments are definitely welcome.
One issue is that you are storing request-specific variables outside the scope of the http server. So what you could do instead is just call your setInterval()s once, right after you start the http server, and not start individual timers per request.
An alternative to adding event handlers for every request might be to instead add the response object to an array that is looped through inside each setInterval() callback, writing to each response. When a connection closes, remove its response object from the array.
The second issue, about detecting a dead connection, can be fixed by listening for the close event on the req object. When that is emitted, remove the server event listeners (e.g. file_changed and hb) you added for that connection and do whatever other cleanup is necessary.
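A sketch of that shared-timer approach (illustrative, not from the answer):

var clients = []; // the currently-open SSE responses

// One heartbeat timer for the whole server, started once
setInterval(function() {
    clients.forEach(function(res) {
        res.write("event: hb\ndata:\n\n");
    });
}, 30000);

// Inside the request handler:
//   clients.push(res);
//   req.on("close", function() {
//       clients.splice(clients.indexOf(res), 1); // drop it on disconnect
//   });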
Here's how I got it to work:
Made a global heart object that contains the server plus all the methods needed to modify it:
var http = require("http");
var url = require("url");
var i = 0; // for dev only

var heart = {
    server: {},
    create: function(object) {
        if (!object) {
            return false;
        }
        this.server = http.createServer().listen(8888, '127.0.0.1');
        if (!this.server) {
            return false;
        }
        for (var each in object) {
            if (!this.listen("hb", object[each])) {
                return false;
            }
        }
        return true;
    },
    listen: function(event, callback) {
        return this.server.addListener(event, callback);
    },
    ignore: function(event, callback) {
        if (!callback) {
            return this.server.removeAllListeners(event);
        } else {
            return this.server.removeListener(event, callback);
        }
    },
    emit: function(event) {
        return this.server.emit(event);
    },
    on: function(event, callback) {
        return this.server.on(event, callback);
    },
    beating: 0,
    beatPeriod: 1000,
    lastBeat: false,
    beat: function() {
        if (this.beating === 0) {
            this.beating = setInterval(function() {
                heart.lastBeat = heart.emit("hb");
            }, this.beatPeriod);
            return true;
        } else {
            return false;
        }
    },
    stop: function() { // not applicable if I always want the heart to beat
        if (this.beating !== 0) {
            this.ignore("hb");
            clearInterval(this.beating);
            this.beating = 0;
            return true;
        } else {
            return false;
        }
    },
    methods: {},
    append: function(name, method) {
        if (this.methods[name] = method) {
            return true;
        }
        return false;
    }
};
/*
    Starting the heart
*/
if (heart.create(object) && heart.beat()) { // `object` is defined elsewhere
    console.log("Heart is beating!");
} else {
    console.log("Failed to start the heart!");
}
I chained the req.on("close", callback) listener on the (essentially) server.on("request", callback) listener, then remove the callback when the close event is triggered.
I chained a server.on("hb", callback) listener on the server.on("request", callback) listener and perform a res.write() when a heartbeat is triggered.
The end result is that each response object is dictated by the server's single heartbeat timer, and removes its own listeners when the request is closed.
heart.on("request", function(req, res){
console.log("Someone Requested!");
var origin = url.parse(req.url).query;
if(origin === "ajax"){
res.writeHead(200, {
"Content-Type": "text/plain",
"Access-Control-Allow-Origin": "*",
"Connection": "close"
});
res.write("{\"i\":\"" + i + "\",\"now\":\"" + Date() + "\"}"); // this needs to be a file reading function
res.end();
} else {
var hbcallback = function(){
console.log("Heartbeat detected!");
if(!res.write("event: hb\ndata:\n\n")){
console.log("Failed to send heartbeat!");
} else {
console.log("Succeeded in sending heartbeat!");
}
};
res.writeHead(200, {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
"Access-Control-Allow-Origin": "*",
"Connection": "keep-alive"
});
heart.on("hb", hbcallback);
req.on("close", function(){
console.log("The client disconnected!");
heart.ignore("hb", hbcallback);
res.end();
});
}
});
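For completeness, the matching client side would look something like this (a sketch, not from the original post):

// The browser subscribes with EventSource; closing the tab (or calling
// source.close()) triggers the server's req "close" handler above.
var source = new EventSource('http://127.0.0.1:8888/');
source.addEventListener('hb', function() {
    console.log('heartbeat received');
});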
