Reduce lag in a Node.js game - javascript

I have created a Node.js game. I have a setInterval function on the server which sends all the objects near the player, but it doesn't seem to be as smooth as when I run it locally. The data package has about 60 objects. Here's the server code for sending objects and receiving them on the client. Is there any way to compress the package or reduce the lag? pinka.herokuapp.com
server:
setInterval(function() {
    server.update();
    for (var key in sockets) {
        var socket = sockets[key];
        var player = server.players.filter(function(p) {
            return p.id == socket.id;
        })[0];
        var package = [];
        var blobs = server.getNodesInRange(player.centerX, player.centerY);
        for (var i = 0; i < blobs.length; i++) {
            var b = blobs[i];
            package.push({
                x: b.x,
                y: b.y,
                nick: b.nick,
                size: Math.sqrt(b._mass) * 10,
                hue: b.hue
            });
        }
        socket.emit("update blobs", package);
        socket.emit("leaders", server.getLeaders());
        if (player.blobs.length == 0) {
            socket.emit("dead");
            continue;
        }
        var translateX = player._centerX * player._drawZoom - player.screenWidth / 2;
        var translateY = player._centerY * player._drawZoom - player.screenHeight / 2;
        socket.emit("center and zoom", {
            centerX: translateX,
            centerY: translateY,
            zoom: player._drawZoom
        });
    }
}, 1000 / 60);
client:
socket.on("update blobs", function(data) {
    blobs = data;
});
This is the whole communication part.

As Jonas W. said, the problem is in the server-client communication.
To be efficient, a realtime system built on Socket.IO should be driven by events rather than interval checks.
I'd suggest something like this:
On the client, emit a 'user:move' event when the user moves, and throttle the emits so the server isn't flooded with unnecessary updates.
On the server, react to that 'user:move' event. If the event needs to be broadcast to other players, filtering down to the ones that can actually "see" the action avoids sending unnecessary information to the clients as well.
An example with pseudo code:
Client
let updating = false;
let timeout = 0;

// Assuming this function is triggered every time the user moves (i.e. presses a key)
onUserMove(data) {
    if ('same press as before' && updating) {
        // ignore the move if it's the same one that has just been sent
        return;
    } else {
        socket.emit('user:move', data);
        // 'Block' further emits of the same movement for 20ms
        updating = true;
        clearTimeout(timeout);
        timeout = setTimeout(() => {
            updating = false;
        }, 20);
    }
}
Server
socket.on('user:move', (data) => {
    const dataForTheUser = processingYouAreAlreadyDoing(data);
    socket.emit('data:for:user', dataForTheUser);

    // In case there's information to be sent to every nearby user
    const dataToBroadcast = getDataToBroadcast(data);
    const usersToBroadcast = getCloseUsers(data);
    for (const user of usersToBroadcast) {
        user.socket.emit('whatever:event', dataToBroadcast);
    }
});
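As for the original question about shrinking the per-tick package: every blob repeats the same property names, so one option (a sketch only, reusing the names from the question; the field order and rounding are illustrative choices) is to send one flat array of numbers per update and rebuild the objects client-side:
// Server: pack each blob as four numbers instead of an object with named keys
var packed = [];
for (var i = 0; i < blobs.length; i++) {
    var b = blobs[i];
    packed.push(Math.round(b.x), Math.round(b.y),
                Math.round(Math.sqrt(b._mass) * 10), b.hue);
}
socket.emit("update blobs", packed); // nicks could be sent separately, and less often

// Client: unpack in the same fixed order
socket.on("update blobs", function(data) {
    blobs = [];
    for (var i = 0; i < data.length; i += 4) {
        blobs.push({ x: data[i], y: data[i + 1], size: data[i + 2], hue: data[i + 3] });
    }
});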

Related

Javascript: Infinite loop in webworker [duplicate]

I want to sort an array, using Web Workers. But this array might receive new values over time, while the worker is still performing the sort function.
So my question is: how can I "stop" the sorting computation on the worker when a new item is received, so that it can sort the array with that item included, while still keeping the sorting progress that was already made?
Example:
let worker = new Worker('worker.js');
let list = [10, 1, 5, 2, 14, 3];

worker.postMessage({ list });

setInterval(() => worker.postMessage({ num: SOME_RANDOM_NUM, list }), 100);

worker.onmessage = event => {
    list = event.data.list;
};
So let's say I've passed 50; the worker made some progress in the sorting before that, and now I have something like this:
[1, 2, 3, 10, 5, 14, 50]. That means the sorting stopped at index 3. So I pass this new array back to the worker, so it can continue the sorting from position 3.
How can I accomplish that, since there is no way to pause/resume a web worker?
Even though the Worker works on an other thread than the one of your main page, and can thus run continuously without blocking the UI, it still runs on a single thread.
This means that until your sort algorithm has finished, the Worker will delay the execution of the message event handler; it is as blocked as would be the main thread.
Even if you made use of an other Worker from inside this worker, the problem would be the same.
The only solution would be to use a kind of generator function as the sorter, and to yield it every now and then so that the events can get executed.
But doing this will drastically slow down your sorting algorithm.
To make it better, you could hook into each event loop iteration thanks to a MessageChannel object: you post a message on one port and receive it on the other port in the next event loop; post again from there, and you have your own hook into every event loop iteration.
Ideally you would run a good-sized batch of work in each of these event loops, but for the demo I'll advance our generator function (borrowed from this Q/A) only one step at a time:
const worker = new Worker(getWorkerURL());
worker.onmessage = draw;
onclick = e => worker.postMessage(0x0000FF / 0xFFFFFF); // add a red pixel

// every frame we request the current state from the Worker
function requestFrame() {
    worker.postMessage('gimme a frame');
    requestAnimationFrame(requestFrame);
}
requestFrame();

// drawing part
const ctx = canvas.getContext('2d');
const img = ctx.createImageData(50, 50);
const data = new Uint32Array(img.data.buffer);
ctx.imageSmoothingEnabled = false;

function draw(evt) {
    // converts 0&1 to black and white pixels
    const list = evt.data;
    list.forEach((bool, i) =>
        data[i] = (bool * 0xFFFFFF) + 0xFF000000
    );
    ctx.setTransform(1, 0, 0, 1, 0, 0);
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.putImageData(img, 0, 0);
    // draw bigger
    ctx.scale(5, 5);
    ctx.drawImage(canvas, 0, 0);
}

function getWorkerURL() {
    const script = document.querySelector('[type="worker-script"]');
    const blob = new Blob([script.textContent]);
    return URL.createObjectURL(blob);
}
body {
    background: ivory;
}
<script type="worker-script">
// our list
const list = Array.from({ length: 2500 }).map(_ => +(Math.random() > .5));
// our sorter generator
let sorter = bubbleSort(list);
let done = false;

/* inner messaging channel */
const msg_channel = new MessageChannel();
// Hook into every event loop
msg_channel.port2.onmessage = e => {
    // proceed to the next step of the sorting algo
    // (could be a few thousand steps per loop)
    const state = sorter.next();
    // while running
    if (!state.done) {
        msg_channel.port1.postMessage('');
        done = false;
    } else {
        done = true;
    }
};
msg_channel.port1.postMessage('');

/* outer messaging channel (from main) */
self.onmessage = e => {
    if (e.data === "gimme a frame") {
        self.postMessage(list);
    } else {
        list.push(e.data);
        if (done) { // restart the sorter
            sorter = bubbleSort(list);
            msg_channel.port1.postMessage('');
        }
    }
};

function* bubbleSort(a) { // * is magic
    var swapped;
    do {
        swapped = false;
        for (var i = 0; i < a.length - 1; i++) {
            if (a[i] > a[i + 1]) {
                var temp = a[i];
                a[i] = a[i + 1];
                a[i + 1] = temp;
                swapped = true;
                yield swapped; // pause here
            }
        }
    } while (swapped);
}
</script>
<pre> click to add red pixels</pre>
<canvas id="canvas" width="250" height="250"></canvas>
Note that the same can be achieved with an async function, which may be more practical in some cases:
const worker = new Worker(getWorkerURL());
worker.onmessage = draw;
onclick = e => worker.postMessage(0x0000FF / 0xFFFFFF); // add a red pixel

// every frame we request the current state from the Worker
function requestFrame() {
    worker.postMessage('gimme a frame');
    requestAnimationFrame(requestFrame);
}
requestFrame();

// drawing part
const ctx = canvas.getContext('2d');
const img = ctx.createImageData(50, 50);
const data = new Uint32Array(img.data.buffer);
ctx.imageSmoothingEnabled = false;

function draw(evt) {
    // converts 0&1 to black and white pixels
    const list = evt.data;
    list.forEach((bool, i) =>
        data[i] = (bool * 0xFFFFFF) + 0xFF000000
    );
    ctx.setTransform(1, 0, 0, 1, 0, 0);
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.putImageData(img, 0, 0);
    // draw bigger
    ctx.scale(5, 5);
    ctx.drawImage(canvas, 0, 0);
}

function getWorkerURL() {
    const script = document.querySelector('[type="worker-script"]');
    const blob = new Blob([script.textContent]);
    return URL.createObjectURL(blob);
}
body {
    background: ivory;
}
<script type="worker-script">
// our list
const list = Array.from({ length: 2500 }).map(_ => +(Math.random() > .5));
// our sorter state
let done = false;

/* outer messaging channel (from main) */
self.onmessage = e => {
    if (e.data === "gimme a frame") {
        self.postMessage(list);
    } else {
        list.push(e.data);
        if (done) { // restart the sorter
            done = false;
            bubbleSort(list);
        }
    }
};

async function bubbleSort(a) { // async is magic
    var swapped;
    do {
        swapped = false;
        for (var i = 0; i < a.length - 1; i++) {
            if (a[i] > a[i + 1]) {
                const temp = a[i];
                a[i] = a[i + 1];
                a[i + 1] = temp;
                swapped = true;
            }
            if (i % 50 === 0) { // by batches of 50?
                await waitNextTask(); // pause here
            }
        }
    } while (swapped);
    done = true;
}

function waitNextTask() {
    return new Promise((resolve) => {
        const channel = waitNextTask.channel ||= new MessageChannel();
        channel.port1.addEventListener("message", (evt) => resolve(), { once: true });
        channel.port2.postMessage("");
        channel.port1.start();
    });
}

bubbleSort(list);
</script>
<pre> click to add red pixels</pre>
<canvas id="canvas" width="250" height="250"></canvas>
There are two decent options.
Option 1: Worker.terminate()
The first is just to kill your existing web worker and start a new one. For that you can use Worker.terminate().
The terminate() method of the Worker interface immediately terminates the Worker. This does not offer the worker an opportunity to finish its operations; it is simply stopped at once.
The only downsides of this approach are:
You lose all worker state. If you had to copy a load of data into it for the request you have to do it all again.
It involves thread creation and destruction, which isn't as slow as most people think but if you terminate web workers a lot it might cause issues.
If neither of those are an issue it is probably the easiest option.
In my case I have lots of state. My worker is rendering part of an image, and when the user pans to a different area I want it to stop what it is doing and start rendering the new area. But the data needed to render the image is pretty huge.
In your case you have the state of your (presumably huge) list that you don't want to lose.
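For completeness, a minimal sketch of Option 1 (the worker file name and the message shape are assumptions, not taken from the question):
// Option 1 sketch: throw the current worker away and start a fresh one.
let worker = createSortWorker();

function createSortWorker() {
    const w = new Worker('worker.js');                        // hypothetical worker file
    w.onmessage = event => { /* consume the sorted list */ };
    return w;
}

function restartSort(list) {
    worker.terminate();           // stops the in-flight sort immediately; all worker state is gone
    worker = createSortWorker();  // brand new thread
    worker.postMessage({ list }); // resend everything the worker needs
}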
Option 2: Yielding
The second option is basically to do cooperative multitasking. You run your computation as normal, but every now and then you pause (yield) and say "should I stop?", like this (this is for some nonsense calculation, not sorting).
let requestId = 0;

onmessage = event => {
    ++requestId;
    sortAndSendData(requestId, event.data);
};

function sortAndSendData(thisRequestId, data) {
    let isSorted = false;
    let total = 0;
    while (data !== 0) {
        // Do a little bit of computation.
        total += data;
        --data;
        // Check if we are still the current request ID.
        if (thisRequestId !== requestId) {
            // Data was changed. Cancel this sort.
            return;
        }
    }
    postMessage(total);
}
This won't work though, because sortAndSendData() runs to completion and blocks the web worker's event loop. We need some way to yield to the event loop just before the thisRequestId !== requestId check. Unfortunately JavaScript doesn't quite have a "yield to the event loop" primitive, but it does have async/await, so we might try this:
let requestId = 0;

onmessage = event => {
    console.log("Got event", event);
    ++requestId;
    sortAndSendData(requestId, event.data);
};

async function sortAndSendData(thisRequestId, data) {
    let isSorted = false;
    let total = 0;
    while (data !== 0) {
        // Do a little bit of computation.
        total += data;
        --data;
        await Promise.resolve();
        // Check if we are still the current request ID.
        if (thisRequestId !== requestId) {
            console.log("Cancelled!");
            // Data was changed. Cancel this sort.
            return;
        }
    }
    postMessage(total);
}
Unfortunately it doesn't work. I think it's because async/await executes things eagerly using "microtasks", which get executed before pending "macrotasks" (our web worker message) if possible.
We need to force our await to become a macrotask, which you can do using setTimeout(0):
let requestId = 0;

onmessage = event => {
    console.log("Got event", event);
    ++requestId;
    sortAndSendData(requestId, event.data);
};

function yieldToMacrotasks() {
    return new Promise((resolve) => setTimeout(resolve));
}

async function sortAndSendData(thisRequestId, data) {
    let isSorted = false;
    let total = 0;
    while (data !== 0) {
        // Do a little bit of computation.
        total += data;
        --data;
        await yieldToMacrotasks();
        // Check if we are still the current request ID.
        if (thisRequestId !== requestId) {
            console.log("Cancelled!");
            // Data was changed. Cancel this sort.
            return;
        }
    }
    postMessage(total);
}
This works! However it is extremely slow. await yieldToMacrotasks() takes approximately 4 ms on my machine with Chrome! This is because browsers set a minimum timeout on setTimeout(0) of something like 1 or 4 ms (the actual minimum seems to be complicated).
Fortunately another user pointed me to a quicker way. Basically sending a message on another MessageChannel also yields to the event loop, but isn't subject to the minimum delay like setTimeout(0) is. This code works and each loop only takes ~0.04 ms which should be fine.
let currentTask = {
    cancelled: false,
};

onmessage = event => {
    currentTask.cancelled = true;
    currentTask = {
        cancelled: false,
    };
    performComputation(currentTask, event.data);
};

async function performComputation(task, data) {
    let total = 0;
    let promiseResolver;
    const channel = new MessageChannel();
    channel.port2.onmessage = event => {
        promiseResolver();
    };
    while (data !== 0) {
        // Do a little bit of computation.
        total += data;
        --data;
        // Yield to the event loop.
        const promise = new Promise(resolve => {
            promiseResolver = resolve;
        });
        channel.port1.postMessage(null);
        await promise;
        // Check if this task has been superseded by another one.
        if (task.cancelled) {
            return;
        }
    }
    // Return the result.
    postMessage(total);
}
I'm not totally happy about it - it relies on postMessage() events being processed in FIFO order, which I doubt is guaranteed. I suspect you could rewrite the code to make it work even if that isn't true.
You can do it with a trick: interrupting the work with the help of setTimeout. For example, it is not possible to execute two functions in parallel without an additional thread, but with the setTimeout interruption trick we can interleave them as follows:
Example of interleaved execution of two functions
var count_0 = 0,
    count_1 = 0;

function func_0() {
    if (count_0 < 3)
        setTimeout(func_0, 0); // the same as: setTimeout(func_0);
    console.log('count_0 = ' + count_0);
    count_0++;
}

function func_1() {
    if (count_1 < 3)
        setTimeout(func_1, 0);
    console.log('count_1 = ' + count_1);
    count_1++;
}

func_0();
func_1();
You will get this output:
count_0 = 0
count_1 = 0
count_0 = 1
count_1 = 1
count_0 = 2
count_1 = 2
count_0 = 3
count_1 = 3
Why is this possible? Because a setTimeout callback is not run immediately: it is queued, and the delay before it runs is enough for some of the code that follows to execute first.
Solution for you
For this case you have to write your own array sort function (or you can use the one below), because we cannot interrupt the native sort function. Inside that function you apply the same setTimeout interruption trick, and the worker can then receive and handle its message event notifications in between.
In the following example the interruption happens halfway through the passes over the array; you can change that point if you want.
Example with a custom sort function that can be interrupted
var numbers = [4, 2, 1, 3, 5];

// this is my bubble sort function with interruption
/**
 * Sorts an array in place.
 * @param {array} arr – array to sort
 * @param {number} dir – if dir = -1 you will get an array like [5,4,3,2,1],
 *                       and if dir = 1 the opposite direction, like [1,2,3,4,5]
 * @param {number} passCount – used only for the setTimeout interruption trick.
 */
function sortNumbersWithInterruption(arr, dir, passCount) {
    var passes = passCount || arr.length,
        halfOfArrayLength = (arr.length / 2) | 0; // for ex. 2.5 | 0 = 2

    // Why we need the while loop: some values start at
    // the end of the array and have to keep moving
    // until they reach their place at the front of the array.
    while (passes--) {
        if (!passCount && passes == halfOfArrayLength) {
            // if you omit the following line, the sorting simply stops here
            setTimeout(function() { sortNumbersWithInterruption(arr, dir, passes) }, 0);
            /*
              You can do here all what you want. Place 1
            */
            break;
        }

        for (var i = 0; i < arr.length - 1; i++) {
            var a = arr[i],
                b = arr[i + 1];
            if ((a - b) * dir > 0) {
                arr[i] = b;
                arr[i + 1] = a;
            }
        }
        console.log('array is: ' + arr.join());
    }

    if (passCount)
        console.log('END string is: ' + arr.join());
}

sortNumbersWithInterruption(numbers, -1); // without the passCount parameter
/*
  You can do here all what you want. Place 2
*/
console.log('The execution is here now!');
You will get this output:
array is: 4,2,3,5,1
array is: 4,3,5,2,1
The execution is here now!
array is: 4,5,3,2,1
array is: 5,4,3,2,1
END string is: 5,4,3,2,1
You can do it with insertion sort (kind of).
Here is the idea:
Start your worker with an internal empty array (an empty array is sorted, obviously)
Your worker receives only elements, not the entire array
Your worker inserts every received element at the correct position in the array
Every n seconds, the worker posts a message with the current array if it has changed since the last message. (If you prefer, you can send the array on every insertion, but it is more efficient to buffer somehow.)
Eventually you get the entire array; if any item is added, you will receive the updated array too.
NOTE: Because your array is always sorted, you can insert at the correct position using binary search. This is very efficient.
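A minimal sketch of that worker, assuming one element arrives per message and a one-second flush interval (both choices are illustrative, not part of the answer):
// worker.js (sketch): maintains its own sorted array and posts it back periodically
let sorted = [];
let dirty = false;

// Binary search for the index that keeps `sorted` in ascending order.
function insertionIndex(arr, value) {
    let lo = 0, hi = arr.length;
    while (lo < hi) {
        const mid = (lo + hi) >> 1;
        if (arr[mid] < value) lo = mid + 1;
        else hi = mid;
    }
    return lo;
}

self.onmessage = event => {
    const value = event.data;                          // one new element per message
    sorted.splice(insertionIndex(sorted, value), 0, value);
    dirty = true;
};

// Flush the current state every second, but only if something changed.
setInterval(() => {
    if (dirty) {
        self.postMessage(sorted);
        dirty = false;
    }
}, 1000);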
I think the case comes down to careful management of postMessage calls and the amount of data passed at a time. I was dealing with a problem of this kind: rather than sending all new data into the worker at once, keep your own queue on the main thread, and when the worker reports that it has finished a small enough portion of the task, decide whether to send the next portion, wait, or quit.
In your case, e.g. one time you get 9000 new items, the next time 100k - you could create a queue/buffer that feeds the worker the next 10k elements each time it finishes processing the previous batch.
const someWorker = new Worker('abc.js');
var processingLock = false;
var queue = [];

function newDataAction(arr = null) {
    if (arr != null) {
        queue = queue.concat(arr);
    }
    if (!processingLock) {
        processingLock = true;
        var data = [];
        for (let i = 0; i < 10000 && queue.length > 0; i++) {
            data.push(queue.pop());
        }
        someWorker.postMessage(data);
    }
}

someWorker.addEventListener('message', function(e) {
    if (e.data == 'finished-last-task') {
        processingLock = false;
        if (queue.length > 0) {
            newDataAction();
        }
    }
});
Having worked through many sorting algorithms, I don't see how feeding new data into a sort over a partially sorted array differs much, in terms of computation time, from sorting the batches separately and then performing a merge.

Workaround for new Edge Indexeddb bug?

Microsoft released update kb4088776 in the past couple days, which has had a devastating effect on performance of indexedDb openCursor.
The simple fiddle here shows the problem. With the update, the "retrieval" time is 40 seconds or more. Prior to the update, it is around 1 second.
https://jsfiddle.net/L7q55ad6/23/
Relevant retrieval portion is here:
var _currentVer = 1;

function _openDatabase(fnSuccess) {
    var _custDb = window.indexedDB.open("MyDatabase", _currentVer);
    _custDb.onsuccess = function (event) {
        var db = event.target.result;
        fnSuccess(db);
    }
    _custDb.onerror = function (event) {
        _custDb = null;
        fnSuccess(null); // should use localData
    }
    _custDb.onupgradeneeded = function (event) {
        var db = event.target.result;
        var txn = event.target.transaction;
        // Create an objectStore for this database
        if (event.oldVersion < _currentVer) {
            var customer = db.createObjectStore("customer", { keyPath: "guid" });
            var index = customer.createIndex("by_id", "id", { unique: false });
        }
    };
}

function _retrieveCustomers(fn) {
    _openDatabase(function (db) {
        if (db == null) {
            alert("not supported");
            return;
        }
        var customers = [];
        var transaction = db.transaction("customer", "readonly");
        var objectStore = transaction.objectStore("customer");
        if (typeof objectStore.getAll === 'function') {
            console.log("using getAll");
            objectStore.getAll().onsuccess = function (event) {
                fn(event.target.result);
            };
        }
        else {
            console.log("using openCursor");
            objectStore.openCursor().onsuccess = function (event) {
                var cursor = event.target.result;
                if (cursor) {
                    customers.push(cursor.value);
                    cursor.continue();
                }
                else {
                    fn(customers);
                }
            };
        }
    });
}
The time to create and add the customers is basically normal, only the retrieval is bad. Edge has never supported the getAll method and it still doesn't after the update.
The only workaround I can think of would be to use localStorage instead, but unfortunately our data set is too large to fit into the 10MB limit. It is actually faster now to retrieve from our servers and convert the text to javascript objects, defeating the main purpose of indexeddb.
I don't have Edge so I can't test this, but does it happen with get too, or just openCursor? If get still performs well, you could store an index (in your example, the list of primary keys; in your real app, maybe something more complicated) in localStorage, and then use that to call get on each one.
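A hedged sketch of that workaround, reusing the store and keyPath from the question (the localStorage key name and how the key list gets maintained are assumptions):
// Hypothetical alternative to the cursor: keep the list of primary keys
// (guids) in localStorage and fetch each record individually with get().
function _retrieveCustomersByKeys(fn) {
    _openDatabase(function (db) {
        if (db == null) { fn(null); return; }
        var keys = JSON.parse(localStorage.getItem("customerKeys") || "[]");
        var objectStore = db.transaction("customer", "readonly").objectStore("customer");
        var customers = [];
        var remaining = keys.length;
        if (remaining === 0) { fn(customers); return; }
        keys.forEach(function (guid) {
            objectStore.get(guid).onsuccess = function (event) {
                if (event.target.result) customers.push(event.target.result);
                if (--remaining === 0) fn(customers); // all get() requests have answered
            };
        });
    });
}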

Socket.io receiving data from multiple sockets in Node

I started developing in Node just a couple of days ago. I'm currently developing an online game that uses Node as server-side application. This application simply provides the .html and .js files to the client and handles the logic of the game.
I'm using Socket.io to exchange data with the clients. When a new client connects to the server through Socket.io the program looks for a room (array of objects containing the sockets for the two users and the game object that handles the logic) that has an empty space and places the user (registering the socket) in it.
I've been able to correctly send each client the position of the sprites, but I have a problem with retrieving data from each socket: if I try to send some data from the client to the server, that data flows through only one of the two sockets, no matter which client is sending it.
Initializing the room array
for (var i = 0; i < roomsNumber; i++) {
    // player1 and player2 are going to store the sockets
    rooms[i] = { player1: null, player2: null, game: null };
}
Socket.io handling the connections
var io = require('socket.io')(server);

io.on('connection', function(socket) {
    console.log("We have a new client: " + socket.id);
    insertPlayer(socket);
    socket.on('disconnect', function() {
        console.log("Client has disconnected: " + socket.id);
        removePlayer(socket);
    });
});
Adding the clients to rooms
function insertPlayer(socket) { // Check for an empty spot in a room
    for (var i = 0; i < rooms.length; i++) {
        if (!rooms[i].player1) {
            rooms[i].player1 = socket; // Register the socket to the room
            checkRooms();
            return;
        }
        if (!rooms[i].player2) {
            rooms[i].player2 = socket; // Register the socket to the room
            checkRooms();
            return;
        }
    }
}
Creating the Game object when a room if full (and destructing it when a player leaves)
function checkRooms() { // Checks if a room is full to initialize the game
    for (var i = 0; i < rooms.length; i++) {
        if (rooms[i].player1 && rooms[i].player2 && !rooms[i].game) {
            rooms[i].game = new Game([rooms[i].player1, rooms[i].player2]); // The constructor receives the sockets
            console.log('New game started');
        }
        if ((!rooms[i].player1 || !rooms[i].player2) && rooms[i].game) {
            rooms[i].game.endGame();
            rooms[i].game = null;
            console.log('Game stopped');
        }
    }
}
Receiving data in the Game class
constructor(sockets) {
    this.sockets = sockets;
    this.snakes = [];
    var self = this;
    for (var i = 0; i < this.sockets.length; i++) {
        this.snakes[i] = new Snake(i * 100 + 100, 200);
        var currentSnake = this.snakes[i];
        var currentSocket = this.sockets[i];
        currentSocket.on('keyPressed', function(data) {
            currentSnake.updateDirection(data); // Calls a Game method that updates the direction of the sprite
            console.log(currentSocket.id + " - " + data);
        });
    }
}
I see a problem with the currentSnake variable in your Game constructor. That variable is scoped to the whole constructor, so there's only one of it, but you're trying to have a separate variable for each player, and that isn't working. Thus, every time you get the keyPressed message, you operate on the same snake, no matter which player the key came from.
If you're using node.js v6+, change var currentSnake in that constructor to let currentSnake so that the variable is separately scoped to each iteration of the for loop.
constructor(sockets) {
    this.sockets = sockets;
    this.snakes = [];
    var self = this;
    for (var i = 0; i < this.sockets.length; i++) {
        this.snakes[i] = new Snake(i * 100 + 100, 200);
        // change these to be declared with let so they are separately scoped
        // for each invocation of the for loop
        let currentSnake = this.snakes[i];
        let currentSocket = this.sockets[i];
        currentSocket.on('keyPressed', function(data) {
            currentSnake.updateDirection(data); // Calls a Game method that updates the direction of the sprite
            console.log(currentSocket.id + " - " + data);
        });
    }
}
Or, if you are running an older version of node.js, switch to .forEach() to replace the for loop as this will also make a new separate scope for each invocation of the loop.
constructor(sockets) {
    this.sockets = sockets;
    this.snakes = [];
    var self = this;
    this.sockets.forEach(function(currentSocket, i) {
        // use self here because `this` is not the Game instance inside this callback
        self.snakes[i] = new Snake(i * 100 + 100, 200);
        var currentSnake = self.snakes[i];
        currentSocket.on('keyPressed', function(data) {
            currentSnake.updateDirection(data); // Calls a Game method that updates the direction of the sprite
            console.log(currentSocket.id + " - " + data);
        });
    });
}

Intermittent failure in nodeJS timer function

I am not familiar with this function; however, I am seeing intermittent failures: sometimes the timer function executes and the newState variable switches, sometimes it doesn't. Can you please check my understanding of what this is doing?
function motionHandler() {
    console.log('im in motionhandler func');
    var newState = true;
    changeAction(newState);
    if (this.timer !== undefined) clearTimeout(this.timer);
    this.timer = setTimeout(function() { changeAction(!newState); }, this.window_seconds * 1000);
};
From what I understand, when this function executes I set the newState variable to true. I then execute changeAction, which sets my motion detector to "true" (motion detected).
I then create a timer. If this.timer already holds something, I clear it. I then create a timeout which counts down from window_seconds * 1000 (i.e. 5 x 1000 milliseconds = 5 seconds). Once that timeout is reached, the changeAction function runs and sets newState to the opposite of what it currently is?
Assuming all of that is correct, sometimes newState gets reset, other times it doesn't.
I am executing the motionHandler function every time I receive a particular RF code from a transmitter. The timeout is there to reset the motion detector back to false when no codes are received.
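In isolation, that reset-the-timer-on-each-event pattern looks like this (a minimal sketch with illustrative names; it keeps the timer handle in a plain variable rather than on this):
var resetTimer;                  // timer handle kept in a plain variable instead of on `this`
var WINDOW_MS = 5 * 1000;        // 5-second window, as in the question

function onMotionCode() {
    changeAction(true);                                      // report motion immediately
    if (resetTimer !== undefined) clearTimeout(resetTimer);  // restart the countdown
    resetTimer = setTimeout(function() {
        changeAction(false);     // no code arrived for WINDOW_MS: clear the motion state
    }, WINDOW_MS);
}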
The full code is actually a plugin for home bridge, and can be seen here:
https://github.com/mattnewham/homebridge-RFReceiver/blob/master/index.js
This is my first real foray into Javascript/NodeJS so I don't really know how to troubleshoot this (other than my console.logs!)
Full code:
var Service;
var Characteristic;

var rpi433 = require("rpi-433"),
    rfSniffer = rpi433.sniffer({
        pin: 2,             // Sniff on GPIO 2 (or physical pin 13)
        debounceDelay: 1000 // Wait 1000ms before reading another code
    }),
    rfEmitter = rpi433.emitter({
        pin: 0,             // Send through GPIO 0 (or physical pin 11)
        pulseLength: 350    // Send the code with a 350 pulse length
    });

var debug = require("debug")("RFReceiverAccessory");
var crypto = require("crypto");

module.exports = function(homebridge) {
    Service = homebridge.hap.Service;
    Characteristic = homebridge.hap.Characteristic;
    homebridge.registerAccessory("homebridge-RFReceiver", "RFReceiver", RFReceiverAccessory);
}

function RFReceiverAccessory(log, config) {
    this.log = log;

    // url info
    this.name = config["name"];
    this.rfcode = config["rfcode"] || 4;
    this.window_seconds = config["window_seconds"] || 5;
    this.sensor_type = config["sensor_type"] || "m";
    this.inverse = config["inverse"] || false;
}

RFReceiverAccessory.prototype = {

    getServices: function() {

        // you can OPTIONALLY create an information service if you wish to override
        // the default values for things like serial number, model, etc.
        var informationService = new Service.AccessoryInformation();

        informationService
            .setCharacteristic(Characteristic.Name, this.name)
            .setCharacteristic(Characteristic.Manufacturer, "Homebridge")
            .setCharacteristic(Characteristic.Model, "RF Receiver")
            .setCharacteristic(Characteristic.SerialNumber, "12345");

        var service, changeAction;
        if (this.sensor_type === "c") {
            service = new Service.ContactSensor();
            changeAction = function(newState) {
                service.getCharacteristic(Characteristic.ContactSensorState)
                    .setValue(newState ? Characteristic.ContactSensorState.CONTACT_DETECTED : Characteristic.ContactSensorState.CONTACT_NOT_DETECTED);
            };
        } else {
            service = new Service.MotionSensor();
            changeAction = function(newState) {
                console.log('changing state');
                service.getCharacteristic(Characteristic.MotionDetected)
                    .setValue(newState);
            };
        }

        function motionHandler() {
            console.log('im in motionhandler func');
            var newState = true;
            changeAction(newState);
            if (this.timer !== undefined) clearTimeout(this.timer);
            this.timer = setTimeout(function() { changeAction(!newState); }, this.window_seconds * 1000);
        };

        var code = this.rfcode;
        var name = this.name;

        rfSniffer.on('data', function(data) {
            console.log('Code received: ' + data.code + ' pulse length : ' + data.pulseLength);
            console.log(code);
            if (data.code == code) {
                console.log("Motion Detected In " + name);
                motionHandler();
            }
        });

        return [informationService, service];
    }
};

firebase presence becomes more and more wrong over time

I have a simple presence user-count set up for firebase based on their example. The problem is that it relies on removing counts on disconnect. However, firebase seems to go down every 2 months and removes the ondisconnect handlers. This means that over time the counts get more and more wrong. Is there any way to fix this?
ty.Presence = function() {
    this.rooms = {};
    this.presence = fb.child('presence');
    this.connectedRef = fb.child('.info/connected');
    if (!localStorage.fb_presence_id) {
        localStorage.fb_presence_id = Math.random().toString(36).slice(2);
    }
    this.browserID = localStorage.fb_presence_id;
    var first = false;
};

ty.Presence.prototype.add = function(roomID, userobj) {
    var self = this;
    var userListRef = this.presence.child(roomID);
    // Generate a reference to a new location for my user with push.
    var obj = {
        s: "on",
        id: this.browserID
    };
    if (userobj) {
        obj.u = {
            _id: userobj._id,
            n: userobj.username
        };
        if (userobj.a) {
            obj.u.a = userobj.a;
        }
    }
    var myUserRef = userListRef.push(obj);
    this.rooms[roomID] = myUserRef;

    this.connectedRef.on("value", function(isOnline) {
        if (isOnline.val()) {
            // If we lose our internet connection, we want ourselves removed from the list.
            myUserRef.onDisconnect().remove();
        }
    });
};

ty.Presence.prototype.count = function(roomID, cb) {
    var self = this;
    var userListRef = this.presence.child(roomID);
    var count = 0;
    var users = {};

    function res() {
        var usersArr = _.pluck(users, 'id');
        usersArr = _.uniq(usersArr);
        count = usersArr.length;
        if (cb) cb(count);
    }

    userListRef.on("child_added", function(css) {
        users[css.name()] = css.val();
        res();
    });
    userListRef.on("child_removed", function(css) {
        delete users[css.name()];
        res();
    });
    cb(count);
};

ty.Presence.prototype.get = function(ref) {
    return this[ref];
};

ty.Presence.prototype.setGlobal = function(object) {
    var self = this;
    _.each(this.rooms, function(myUserRef) {
        myUserRef.set(object);
    });
};

ty.Presence.prototype.remove = function(roomID) {
    if (this.rooms[roomID])
        this.rooms[roomID].remove();
};

ty.Presence.prototype.off = function(roomID) {
    var userListRef = this.presence.child(roomID);
    userListRef.off();
};

ty.presence = new ty.Presence();
ty.presence.add('all');
The onDisconnect handlers can be lost if a Firebase is restarted (e.g. when a new release is pushed live). One simple approach is to attach a timestamp as a priority to the records when they are stored. As long as the client remains online, have it update that timestamp occasionally.
setInterval(function() {
    connectedRef.setPriority(Date.now());
}, 1000 * 60 * 60 * 4 /* every 4 hours */);
Thus, any record which reaches, say, 24 hours old, would obviously be an orphan. A challenge could take place by clients (e.g. when a new client receives the list for the first time) or by a server process (e.g. a node.js script with a setInterval() to check for records older than X).
presenceRef.endAt(Date.now()-24*60*60*1000 /* 24 hours ago */).remove();
Less than ideal, sure, but a functional workaround I've utilized in apps.
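A sketch of such a cleanup pass on the server side, assuming the legacy Firebase API used above and presence records whose priority is their last-updated timestamp (the roomRef name and the hourly interval are illustrative):
// Hypothetical cleanup job: remove presence records whose timestamp priority
// hasn't been refreshed within 24 hours (presumed orphans).
// roomRef is assumed to be something like fb.child('presence').child(roomID).
setInterval(function() {
    var cutoff = Date.now() - 24 * 60 * 60 * 1000; // anything older than 24 hours
    roomRef.endAt(cutoff).once('value', function(snapshot) {
        snapshot.forEach(function(orphan) {
            orphan.ref().remove();
        });
    });
}, 60 * 60 * 1000 /* check hourly */);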
