Node.js net TCP buffering memory leak

I'm writing a TCP game server in Node.js and am having issues splitting the TCP stream into messages. Since I want to read integers and floats from the buffer, I can't find a suitable module to outsource this to: all the ones I've found deal with simple strings terminated by a newline delimiter. So I decided to prefix each message with its length in bytes. I wrote a simple program to spam the server with random messages (well constructed, with a UInt16LE prefix giving the length of the message), and I noticed that the longer I leave the two programs running, the more memory my server uses. I tried tracing the memory allocation with a debugging tool, with no success, so I figured I'd post my code here and hope for a reply. Any tips or pointers as to where I'm going wrong, or what I could do differently or more efficiently, would be amazing!
Thanks.
server.on("connection", function(socket) {
    var session = new sessionCS(socket);
    console.log("Connection from " + session.address);
    // data buffering variables
    var currentBuffer = new Buffer(args.bufSize);
    var bufWrite = 0;
    var bufRead = 0;
    var mSize = null;
    var i = 0;
    socket.on("data", function(dataBuffer) {
        // check if the buffer risks overflowing; if so, compact the
        // unread bytes to the front of a fresh buffer
        if (bufWrite + dataBuffer.length > args.bufSize - 1) {
            var newBufWrite = 0;
            var newBuffer = new Buffer(args.bufSize);
            while (bufRead < bufWrite) {
                newBuffer[newBufWrite] = currentBuffer[bufRead];
                newBufWrite++;
                bufRead++;
            }
            currentBuffer = newBuffer;
            bufWrite = newBufWrite;
            bufRead = 0;
            newBufWrite = null;
        }
        // append the incoming data
        for (i = 0; i < dataBuffer.length; i++) {
            currentBuffer[bufWrite] = dataBuffer[i];
            bufWrite++;
        }
        // if the beginning of a message has not been acknowledged yet,
        // read the UInt16LE length prefix
        if (mSize === null && (bufWrite - bufRead) >= 2) {
            mSize = currentBuffer.readUInt16LE(bufRead);
        }
        // if the difference between read and write is greater than or equal to
        // mSize + 2 (+2 for the integer holding the message size),
        // a full message is in the buffer and needs to be extracted
        while ((bufWrite - bufRead) >= mSize + 2) {
            bufRead += 2;
            var messageBuffer = new Buffer(mSize);
            for (i = 0; i < messageBuffer.length; i++) {
                messageBuffer[i] = currentBuffer[bufRead];
                bufRead++;
            }
            // this is where the message buffer would be passed to the router
            router(session, messageBuffer);
            messageBuffer = null;
            // see if another message length indicator is in the buffer
            if ((bufWrite - bufRead) >= 2) {
                mSize = currentBuffer.readUInt16LE(bufRead);
            } else {
                mSize = null;
            }
        }
    });
});

Buffer Frame Serialization Protocol (BUFSP): https://github.com/teambition/bufsp
It may be what you want: it encodes messages into length-prefixed buffers, writes them to TCP, and splits the received TCP stream buffers back into messages.
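For comparison, the framing itself can also be done without a fixed-size ring buffer by accumulating chunks with Buffer.concat. The following is only a sketch of that technique, not the poster's code; sessionCS and router are the names used in the question.

// Sketch: UInt16LE length-prefixed framing via Buffer.concat.
server.on("connection", function(socket) {
    var session = new sessionCS(socket);
    var pending = Buffer.alloc(0); // unparsed bytes carried between "data" events
    socket.on("data", function(chunk) {
        pending = Buffer.concat([pending, chunk]);
        // extract every complete message currently buffered
        while (pending.length >= 2) {
            var mSize = pending.readUInt16LE(0);
            if (pending.length < mSize + 2) {
                break; // wait for the rest of this message
            }
            router(session, pending.slice(2, mSize + 2));
            pending = pending.slice(mSize + 2);
        }
    });
});

Because pending only ever holds the bytes of a not-yet-complete trailing message, memory use stays proportional to the largest in-flight message instead of growing over time.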

Related

Intensive JavaScript job, browser freeze

I have a blob to construct: I receive almost 100 parts (~500 KB each) that I need to decrypt and assemble into a blob file.
It's actually working fine, but the decryption is CPU-intensive and freezes my page.
I've tried different approaches, with jQuery deferreds and timeouts, but I always hit the same problem.
Is there a way to avoid freezing the UI thread?
var parts = blobs.sort(function (a, b) {
    return a.part - b.part;
});
// our final byte arrays
var byteArrays = [];
for (var i = 0; i < parts.length; i++) {
    // this job is intensive and takes time
    byteArrays.push(that.decryptBlob(parts[i].blob.b64, fileType));
}
// create a new blob with all the data
var blob = new Blob(byteArrays, { type: fileType });
The body of the for(...) loop is synchronous, so the entire decryption process is synchronous: chunks are decrypted one after another on the UI thread. How about making it asynchronous, decrypting multiple chunks in parallel? In JavaScript this is what workers are for. Workers run in parallel, so if you spawn 5 workers, for example, the total time drops to roughly T / 5 (where T is the total time in synchronous mode).
Read more about worker threads here:
https://blog.logrocket.com/node-js-multithreading-what-are-worker-threads-and-why-do-they-matter-48ab102f8b10/
Thanks to Sebastian Simon, I took the worker route, and it's working fine.
var chunks = [];
var decryptedChunkFnc = function (args) {
    // my blob-building job here
};
// determine the maximum number of workers to use
var maxWorker = 5;
if (totalParts < maxWorker) {
    maxWorker = totalParts;
}
var workers = [];
for (var iw = 0; iw < maxWorker; iw++) {
    // no eval needed: keep the workers in an array
    var wo = new Worker("decryptfile.min.js");
    wo.onmessage = decryptedChunkFnc; // attach the handler before posting
    var item = blobs.pop(); // next part for this worker
    wo.postMessage(MyObjectPassToTheFile);
    workers.push(wo);
}
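For completeness, the worker script (decryptfile.min.js above) would follow the usual pattern. This is a sketch: decryptBlob stands in for the actual decryption routine, and the field names are illustrative.

// Sketch of the worker side: the heavy decryption runs off the UI thread.
self.onmessage = function (e) {
    var part = e.data; // the object passed via postMessage
    var bytes = decryptBlob(part.blob.b64, part.fileType); // CPU-heavy work
    // hand the decrypted bytes back to the main thread
    self.postMessage({ part: part.part, bytes: bytes });
};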

WebAudio - seamlessly playing sequence of audio chunks

I have a live, constant source of waveform data that gives me one second of single-channel audio with a constant sample rate, every second. Currently I play the chunks this way:
// data: Float32Array, context: AudioContext
function audioChunkReceived(context, data, sample_rate) {
    var audioBuffer = context.createBuffer(2, data.length, sample_rate);
    audioBuffer.getChannelData(0).set(data);
    var source = context.createBufferSource(); // creates a sound source
    source.buffer = audioBuffer;
    source.connect(context.destination);
    source.start(0);
}
Audio plays fine but with noticeable pauses between consecutive chunks being played (as expected). I'd like to get rid of them and I understand I'll have to introduce some kind of buffering.
Questions:
Is there a JS library that can do this for me? (I'm in the process of searching through them)
If there is no library that can do this, how should I do it myself?
Detecting when playback finished in one source and have another one ready to play it immediately afterwards? (using AudioBufferSourceNode.onended event handler)
Create one large buffer and copy my audio chunks one after another and control the flow using AudioBufferSourceNode.start AudioBufferSourceNode.stop functions?
Something different?
I've written a small class in TypeScript that serves as a buffer for now. It has a bufferSize field that controls how many chunks it can hold. It's short and self-descriptive, so I'll paste it here. There is much to improve, so any ideas are welcome.
(You can quickly convert it to JS using: https://www.typescriptlang.org/play/ )
class SoundBuffer {
    private chunks: Array<AudioBufferSourceNode> = [];
    private isPlaying: boolean = false;
    private startTime: number = 0;
    private lastChunkOffset: number = 0;

    constructor(public ctx: AudioContext, public sampleRate: number, public bufferSize: number = 6, private debug = true) { }

    private createChunk(chunk: Float32Array) {
        var audioBuffer = this.ctx.createBuffer(2, chunk.length, this.sampleRate);
        audioBuffer.getChannelData(0).set(chunk);
        var source = this.ctx.createBufferSource();
        source.buffer = audioBuffer;
        source.connect(this.ctx.destination);
        source.onended = (e: Event) => {
            this.chunks.splice(this.chunks.indexOf(source), 1);
            if (this.chunks.length == 0) {
                this.isPlaying = false;
                this.startTime = 0;
                this.lastChunkOffset = 0;
            }
        };
        return source;
    }

    private log(data: string) {
        if (this.debug) {
            console.log(new Date().toUTCString() + " : " + data);
        }
    }

    public addChunk(data: Float32Array) {
        if (this.isPlaying && (this.chunks.length > this.bufferSize)) {
            this.log("chunk discarded");
            return; // throw away
        } else if (this.isPlaying && (this.chunks.length <= this.bufferSize)) { // schedule & add right now
            this.log("chunk accepted");
            let chunk = this.createChunk(data);
            chunk.start(this.startTime + this.lastChunkOffset);
            this.lastChunkOffset += chunk.buffer.duration;
            this.chunks.push(chunk);
        } else if ((this.chunks.length < (this.bufferSize / 2)) && !this.isPlaying) { // add & don't schedule
            this.log("chunk queued");
            let chunk = this.createChunk(data);
            this.chunks.push(chunk);
        } else { // add & schedule entire buffer
            this.log("queued chunks scheduled");
            this.isPlaying = true;
            let chunk = this.createChunk(data);
            this.chunks.push(chunk);
            this.startTime = this.ctx.currentTime;
            this.lastChunkOffset = 0;
            for (let i = 0; i < this.chunks.length; i++) {
                let chunk = this.chunks[i];
                chunk.start(this.startTime + this.lastChunkOffset);
                this.lastChunkOffset += chunk.buffer.duration;
            }
        }
    }
}
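For context, feeding the class looks roughly like this. It is a sketch: getNextChunk is a hypothetical stand-in for whatever produces one second of Float32Array samples.

// Sketch: feeding SoundBuffer from a once-per-second source.
var ctx = new AudioContext();
var soundBuffer = new SoundBuffer(ctx, 44100, 6);
setInterval(function () {
    soundBuffer.addChunk(getNextChunk()); // getNextChunk() is hypothetical
}, 1000);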
You don't show how audioChunkReceived is called, but to get seamless playback you have to make sure you have the data before you want to play it, and before the previous chunk stops playing.
Once you have this, you can schedule the newest chunk to start playing when the previous one ends by calling start(t), where t is the end time of the previous chunk.
However, if the buffer's sample rate is different from context.sampleRate, it's probably not going to play smoothly, because of the resampling needed to convert the buffer to the context rate.
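As a sketch of that scheduling idea (assumptions: mono data, as in the question, and a sample rate matching the context), keep a running timestamp of where the last chunk ends:

// Sketch: gapless playback by starting each chunk where the previous one ends.
var nextStartTime = 0;
function audioChunkReceived(context, data, sample_rate) {
    var audioBuffer = context.createBuffer(1, data.length, sample_rate);
    audioBuffer.getChannelData(0).set(data);
    var source = context.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(context.destination);
    // never schedule in the past; otherwise start exactly at the previous end time
    if (nextStartTime < context.currentTime) {
        nextStartTime = context.currentTime;
    }
    source.start(nextStartTime);
    nextStartTime += audioBuffer.duration;
}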
I think it is because you allocate your buffer for 2 channels but only fill the first one. Change
context.createBuffer(2, data.length, sample_rate);
to
context.createBuffer(1, data.length, sample_rate);

postMessage webworker memory leak

I'm using a web worker to pass some data at an interval of 10 ms. In the task manager I can see the process's working set increasing until I cancel the interval.
Here's what I'm doing:
Sending:
function send() {
    setInterval(function() {
        const data = {
            array1: get100Arrays(),
            array2: get500Arrays()
        };
        let json = JSON.stringify(data);
        let arbfr = str2ab(json);
        worker.postMessage(arbfr, [arbfr]);
    }, 10);
}

function str2ab(str) {
    var buf = new ArrayBuffer(str.length * 2); // 2 bytes for each char
    var bufView = new Uint16Array(buf);
    for (var i = 0, strLen = str.length; i < strLen; i++) {
        bufView[i] = str.charCodeAt(i);
    }
    return buf;
}
I also tried doing only this, with no success:
// let json = JSON.stringify(data);
// let arbfr = str2ab(json);
worker.postMessage(data);
Anyone know why this might be leaking? I'm currently trying this on Chrome.
Usually memory leaks in web workers are created by passing values between the main thread and the worker thread many times.
If possible, try to send the array to the web worker only once.
You can also check whether Transferable Objects work properly in your browser (after a successful transfer the source buffer is neutered, i.e. its byteLength drops to 0):
var ab = new ArrayBuffer(1);
try {
    worker.postMessage(ab, [ab]);
    if (ab.byteLength) {
        console.log('TRANSFERABLE OBJECTS are not supported in your browser!');
    } else {
        console.log('USING TRANSFERABLE OBJECTS');
    }
} catch (e) {
    console.log('TRANSFERABLE OBJECTS are not supported in your browser!');
}
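If the payload is numeric, one way to avoid both the JSON round-trip and the repeated string copies is to pack the numbers into a typed array and transfer its buffer. A sketch, under the assumption that the data from get100Arrays()/get500Arrays() can be flattened into plain numbers:

// Sketch: transfer numeric data without JSON.stringify/str2ab.
setInterval(function () {
    var numbers = get100Arrays().flat().concat(get500Arrays().flat());
    var payload = new Float64Array(numbers);
    // the buffer is moved, not copied; it is neutered on this side afterwards
    worker.postMessage(payload.buffer, [payload.buffer]);
}, 10);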

Converting Uint8Array crashing browser for large files

I have an app with an input of type "file". The following methods grab the file, then prep it to be sent to the server via AJAX.
private StartUpload = (files) => {
    if (files && files.length === 1) {
        this.GetFileProperties(files[0])
            .done((properties: IFileProperties) => {
                $('input[type=file]').val("");
                if (this._compatibleTypes.indexOf(properties.Extension) >= 0) {
                    var base64 = this.ArrayBufferToBase64(properties.ArrayBuffer);
                    this.DoFileUpload(base64, properties.Extension).always(() => {
                        this.ShowDialogMessage('edit_document_upload_complete', 'edit_document_upload_complete');
                    });
                } else {
                    this.ShowDialogMessage('edit_document_upload_incompatible', 'edit_document_upload_compatible_types', this._compatibleTypes);
                }
            });
    } else {
        this.ShowDialogMessage('edit_document_upload_one_file', 'edit_document_upload_one_file_msg');
    }
};

private ArrayBufferToBase64(buffer): any {
    var binary = '';
    var bytes = new Uint8Array(buffer);
    for (var xx = 0, len = bytes.byteLength; xx < len; xx++) {
        binary += String.fromCharCode(bytes[xx]);
    }
    return window.btoa(binary);
}

private DoFileUpload = (base64, extension) => {
    this.IsLoading(true);
    var dfd = $.Deferred();
    var data = {
        data: base64
    };
    UpdateFormDigest((<any>window)._spPageContextInfo.webServerRelativeUrl, (<any>window)._spFormDigestRefreshInterval);
    var methodUrl = "_vti_bin/viewfile/FileInformation.asmx/AddScannedItemAlt";
    $.ajax({
        headers: {
            "X-RequestDigest": $("#__REQUESTDIGEST").val()
        },
        url: methodUrl,
        contentType: "application/json",
        data: JSON.stringify(data),
        dataType: 'json',
        type: "POST",
        success: (response) => {
            // do stuff
            dfd.resolve(response); // resolve so .always() callers fire
        },
        error: (e) => {
            // do stuff
            dfd.reject(e);
        }
    });
    return dfd;
};
This works perfectly in the vast majority of cases. However, when the file size is large (say 200MB+) it kills the browser.
Chrome shows a blackish-grey page with the "aw snap" message and basically dies.
IE shows an "Out of Memory" console error but continues to work.
FF shows an "Unresponsive script" warning. Choosing "don't show me again" lets it run until an "out of memory" console error shows up.
This is where it dies:
for (var xx = 0, len = bytes.byteLength; xx < len; xx++) {
    binary += String.fromCharCode(bytes[xx]);
}
Wrapping a try/catch around this does nothing and no error is caught.
I can step into the loop without a crash, but stepping through every iteration is tough since len = 210164805. For this I tried to add console.log(xx) to the loop and let it fly - but the browser crashes before anything shows up in the log.
Is there some limit to the size a string can be that could be causing the browser to crash once exceeded?
Thanks
You need to do this in an asynchronous way, by breaking the work up into blocks or time segments.
This means your code will need to use a callback, but otherwise it's straightforward.
Example
var bytes = new Uint8Array(256 * 1024 * 1024); // 256 MB buffer
convert(bytes, function (str) { // invoke the process with a callback defined
    alert("Done!");
});

function convert(bytes, callback) {
    var binary = "", blockSize = 2 * 1024 * 1024, // 2 MB block
        block = blockSize, // block segment
        xx = 0, len = bytes.byteLength;
    (function _loop() {
        while (xx < len && --block > 0) { // copy until block segment = 0
            binary += String.fromCharCode(bytes[xx++]);
        }
        if (xx < len) { // more data to copy?
            block = blockSize; // re-init a new block segment
            binary = ""; // for the demo, to avoid running out of memory
            setTimeout(_loop, 10); // KEY: async wait
            // update a progress bar so we can see something going on:
            document.querySelector("div").style.width = (xx / len) * 100 + "%";
        } else {
            callback(binary); // done, invoke the callback
        }
    })(); // self-invoking loop
}
html, body {width:100%;margin:0;overflow:hidden}
div {background:#4288F7;height:10px}
<div></div>
Converting a large buffer to a string can easily make the client run out of memory. A 200 MB buffer converted to a string adds 2 x 200 MB on top, since strings are stored as UTF-16 (i.e. 2 bytes per char), so here we use 600 MB out of the box.
It also depends on the browser and how it deals with memory allocations, as well as on the system; the browser will try to protect the computer against malicious scripts that attempt to fill up the memory, for example.
You should be able to stay in the ArrayBuffer and send that to the server instead.
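For instance, here is a sketch of a raw upload, under the assumption that the endpoint (methodUrl as in the question) can be changed to accept a binary body instead of JSON-wrapped base64:

// Sketch: POST the raw bytes; no base64, no giant intermediate string.
function uploadRaw(buffer, methodUrl) {
    return fetch(methodUrl, {
        method: "POST",
        headers: {
            "X-RequestDigest": $("#__REQUESTDIGEST").val(),
            "Content-Type": "application/octet-stream"
        },
        body: new Blob([buffer]) // memory stays roughly 1x the file size
    });
}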

Measure Node.js total inbound/outbound traffic generated through net.server

I've got this nifty app I'm working on, and to calculate some scenarios I'd like to know how much inbound/outbound traffic is generated per session by the app itself.
I don't want to use the browser for this, but to gather that info from the server side.
Node's net.Server does not have any methods that fit the description; I found only these net.Socket properties:
socket.bytesRead - the amount of received bytes.
socket.bytesWritten - the amount of bytes sent.
Do they apply to the traffic going through a net.Server? Are there any existing node_modules that gather that kind of stats?
Thanks in advance
Well, that's because net.Server uses net.Socket internally. To get the totals, you will have to add bytesRead and bytesWritten to the totals once the socket is closed. Example:
const net = require("net");

var server = net.createServer(function (c) {
    c.on('close', function () {
        // add this connection's counters to the totals
        server.bytesSent += c.bytesWritten;
        server.bytesReceived += c.bytesRead;
    });
    c.write('Hello world!\r\n');
    c.pipe(c);
    c.end();
});

server.bytesReceived = 0;
server.bytesSent = 0;
server.listen(3000);

var time = process.hrtime();
setInterval(function () {
    process.stdout.write('\u001B[2J\u001B[0;0f'); // clear the screen
    var diff = process.hrtime(time)[0] + process.hrtime(time)[1] / 1000000000;
    var bpsSent = Math.round(server.bytesSent / diff) || 0;
    var bpsReceived = Math.round(server.bytesReceived / diff) || 0;
    console.log("Running node.js %s on %s-%s", process.version, process.platform, process.arch);
    console.log("Memory usage: %d bytes", process.memoryUsage().rss);
    console.log("Uptime: %ds", Math.round(process.uptime()));
    console.log("Open connections: %d", server.connections);
    console.log("In: %d bytes (%d bytes/s)", server.bytesReceived, bpsReceived);
    console.log("Out: %d bytes (%d bytes/s)", server.bytesSent, bpsSent);
}, 100);
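To try it out, run the server and connect with any TCP client (for example: nc localhost 3000); the totals update each time a connection closes.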
If you need to update the totals in real time (as data is received/sent), you can instead add the lengths of the buffers directly to the totals as they are written/read. This is especially useful when you have sockets that stay open for a long time and transfer large amounts of data.
var server = net.createServer(function (c) {
    var oldWrite = c.write;
    // wrap write so outgoing bytes are counted as they are sent
    c.write = function (d) {
        if (!Buffer.isBuffer(d)) {
            d = new Buffer(d);
        }
        oldWrite.call(this, d);
        server.bytesSent += d.length;
    };
    c.on('data', function (d) {
        server.bytesReceived += d.length;
    });
    c.write('Hello world!\r\n');
    c.pipe(c);
    c.end();
});
