JSON array to nodejs Readable object stream - javascript

I have an event that fires every time a packet is received from a bluetooth device. The packet comes in as a JSON array and always contains 10 objects similar to the ones below.
var s1_data = [{"record":0,"sensor":1,"timestamp":26566,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":1,"sensor":1,"timestamp":26567,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":2,"sensor":1,"timestamp":26568,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":3,"sensor":1,"timestamp":26569,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":4,"sensor":1,"timestamp":26570,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":5,"sensor":1,"timestamp":26571,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":6,"sensor":1,"timestamp":26572,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":7,"sensor":1,"timestamp":26573,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":8,"sensor":1,"timestamp":26574,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":9,"sensor":1,"timestamp":26575,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}}]
I need to push each individual object into a node.js stream that is being consumed by another stream.
Can anyone give an example of how I can push these to a Readable object stream on an ongoing basis (or do I need to use a PassThrough stream)? I can't seem to get my head around how to do this!
Edited to add sample code below. chunk.toString() fails because chunk is undefined.
var Readable = require('stream').Readable;
var util = require('util');
var through2 = require('through2');
var s1 = require('./sensor1.js');
var s2 = require('./sensor2.js');
var minLength = s1.length;
//console.log(s1);
var s1stream = new Readable({ objectMode: true });
var s2stream = new Readable({ objectMode: true });
s1stream._read = function () {
this.push();
};
s2stream._read = function () {
this.push();
};
if (s2.length < minLength){
minLength = s2.length;
}
var n1 = 0;
setInterval(function() {
if (n1++ < minLength) {
console.log(s1[n1].record);
s1stream.push(s1[n1]);
} else if (n1++ === minLength) {
s1stream.push(null);
}
}, 1000);
var n2 = 0;
setInterval(function() {
if (n2++ < minLength) {
s2stream.push(s2[n2]);
} else if (n2++ === minLength) {
s2stream.push(null);
}
}, 1000);
s1stream.pipe(through2.obj(function (chunk, enc, callback) {
this.push(chunk.toString())
callback()
})).pipe(process.stdout);
Edit: working code below. It looks like the problem was the way I was creating the read functions.
var Readable = require('stream').Readable;
var util = require('util');
var through2 = require('through2');
var Transform = require('stream').Transform;
var s1 = require('./sensor1.js');
var s2 = require('./sensor2.js');
var minLength = s1.length;
//console.log(s1);
var s1stream = new Readable({
objectMode: true,
read() {}
})
const s2stream = new Readable({
objectMode: true,
read() {}
})
if (s2.length < minLength){
minLength = s2.length;
}
var n1 = 0;
setInterval(function() {
if (n1 < minLength) {
s1stream.push(s1[n1]);
n1++;
} else if (n1 == minLength) {
s1stream.push(null);
}
}, 1000);
var n2 = 0;
setInterval(function() {
if (n2++ < minLength) {
s2stream.push(s2[n2]);
} else if (n2++ === minLength) {
s2stream.push(null);
}
}, 1000);
var jsonStream = through2.obj(function(file, encoding, cb) {
this.push(file.record.toString())
cb()
})
s1stream.pipe(jsonStream).pipe(process.stdout);

You will need to implement a Readable stream that "wraps" your bluetooth source. When the packet-received event fires, loop through your array of objects and call your Readable stream's push method, passing in each object. A somewhat similar example of this can be seen here.
Note that since you'll be pushing actual JSON objects, you'll want to enable the objectMode option when you write your implementation, as described here.
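A minimal sketch of that idea, assuming a hypothetical bluetoothDevice emitter whose 'packet' event delivers the 10-object array and whose 'disconnect' event signals the end of the data (substitute whatever your bluetooth library actually exposes):
var Readable = require('stream').Readable;
var through2 = require('through2');

// objectMode lets us push plain JS objects instead of Buffers/strings
var sensorStream = new Readable({
    objectMode: true,
    read() {} // no-op: data is pushed from the event handler below
});

// 'bluetoothDevice', 'packet' and 'disconnect' are placeholders for your real source
bluetoothDevice.on('packet', function (packetArray) {
    packetArray.forEach(function (record) {
        sensorStream.push(record); // one object per stream chunk
    });
});

bluetoothDevice.on('disconnect', function () {
    sensorStream.push(null); // end the stream
});

// downstream consumer, e.g. a through2 object transform
sensorStream.pipe(through2.obj(function (chunk, enc, callback) {
    this.push(chunk.record + '\n');
    callback();
})).pipe(process.stdout);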

Related

How to get xmlhttprequest results in a for loop

Good day all,
Please, I'm trying to get ping latency values with XMLHttpRequest in a for loop, collecting five consecutive latencies into a global array in a React Native application. Once the loop finishes, the rest of the code should execute, but the code just runs straight through without waiting for the array values from the loop: the final result comes out as 0, and only later do the latency values arrive as the XMLHttpRequests complete. How can I ensure that I get the latency results first before executing the remaining code? My code is below:
let latencies = [];
class App extends Component {
startScan = () => {
this.setState({
scanning: true,
});
this.getJitter();
}
getPing = () => {
var request = new XMLHttpRequest();
var startTime = new Date();
request.open(
'GET',
'http://dummy.restapiexample.com/api/v1/employees',
true,
);
request.send();
request.onreadystatechange = (e) => {
if (request.readyState == 4 && request.status == 200) {
var endTime = new Date();
var ping = endTime.getTime() - startTime.getTime();
this.setState({ping: ping});
latencies.push(ping);
console.log('ping:', ping);
console.log(latencies);
return ping;
}
};
};
getJitter = () => {
for(var i=0; i<5; i++){
this.getPing();
}
//Get each latency difference
var total1 = 0;
for (var i = 0; i < latencies.lenght; i++) {
if (typeof latencies[i] === 'number') {
console.log(latencies[i]);
total1 += latencies[i + 1] - latencies[i];
console.log(total1);
}
}
var jitter = total1 / (latencies.length - 1);
console.log(jitter); //this comes out as 0
latencies = [];
};
render() {
return (
...
<Button title="Scan" onPress={this.startScan} />
)
};
}
Thanks
Tim
The problem is that the XMLHttpRequest is asynchronous and will allow other code to run while it is trying to fetch the resource. The solution to this is to await each request and then move on to the next block.
I've switched the XMLHttpRequest for the Fetch API. fetch returns a Promise which you can await and wait for it to finish.
class App extends Component {
startScan = () => {
this.setState({
scanning: true,
});
this.getJitter().then(() => {
this.setState({
scanning: false,
});
});
}
getPing = async () => {
const startTime = new Date();
await fetch('http://dummy.restapiexample.com/api/v1/employees');
const endTime = new Date();
const ping = endTime.getTime() - startTime.getTime();
this.setState({ ping });
return ping;
}
getJitter = async () => {
const latencies = [];
for (let i = 0; i < 5; i++){
const ping = await this.getPing();
latencies.push(ping);
}
//Get each latency difference
let total1 = 0;
for (let i = 0; i < latencies.length - 1; i++) {
if (typeof latencies[i] === 'number') {
console.log(latencies[i]);
total1 += latencies[i + 1] - latencies[i];
console.log(total1);
}
}
const jitter = total1 / (latencies.length - 1);
console.log(jitter); // average difference between consecutive pings
};
render() {
return (
<Button title="Scan" onPress={this.startScan} />
)
}
}
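As a side note, the await inside the loop is what keeps the pings sequential; firing all five requests at once (for example with Promise.all) would let them compete for the network and likely skew the individual latency measurements, so the one-at-a-time loop is the right shape for this kind of measurement.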

Nodejs: Attach Event Listener to Array If contains value

In my project I have four objects (connectors to HW decoders), which are obviously synchronous, and an array (queue) of strings that I receive for decryption. Is there any way to check whether the array contains values for the Decoder objects? I think I can do it via EventEmitters, but I don't know how.
Thanks for help
Update
The incoming value from another server is a 64-byte string; the decoder returns a 15-byte value. For communication over the serial port I'm using the SerialPort package. I know how to do the communication with just one decoder, but I don't know how to do it with more (in a proper way).
My code for communicating with one decoder:
var SerialPort = require('serialport');
var ports = [];
ports.push( {device: new SerialPort('/dev/cu.usbmodem1411', { autoOpen: false }), enable: true});
ports.push( {device: new SerialPort('/dev/cu.usbmodem1421', { autoOpen: false }), enable: true});
//id: incoming encrypted value
module.exports.decryption = function (id, callback) {
ports[0].enable = false;
var idArray = [];
idArray[0] = id.slice(0, 16);
idArray[1] = id.slice(16, 32);
idArray[2] = id.slice(32, 48);
idArray[3] = id.slice(48, 64);
ports[0].device.open(function (err) {
if (err) {
return console.log('Error opening port: ', err.message);
}
ports[0].device.write('d01');
let index = 0;
var buffer = setInterval(function () {
ports[0].device.write(idArray[index]);
if (index === idArray.length - 1) clearInterval(buffer);
index++;
}, 1);
});
ports[0].device.on('data', function (data) {
callback( hex2a(data.toString('ascii')));
ports[0].device.close(function (response) {
console.log("device disconnected")
},function (error) { });
ports[0].enable = true;
});
};
function hex2a(hexx) {
var hex = hexx.toString();//force conversion
var str = '';
for (var i = 0; i < hex.length; i += 2)
str += String.fromCharCode(parseInt(hex.substr(i, 2), 16));
return str;
}
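Not a full answer, but a minimal sketch of the EventEmitter idea mentioned above: keep the decoder connectors in an array with their enable flags, queue incoming strings, and emit an event whenever work arrives or a decoder frees up. DecoderPool, the 'work'/'result' event names, and the decrypt callback signature are made up for illustration and are not part of the SerialPort API:
var EventEmitter = require('events').EventEmitter;
var util = require('util');

// Hypothetical pool that hands queued ids to whichever decoder is free.
function DecoderPool(ports, decrypt) {
    EventEmitter.call(this);
    this.ports = ports;       // [{ device: SerialPort, enable: true }, ...]
    this.queue = [];          // incoming strings waiting for a free decoder
    this.decrypt = decrypt;   // function (port, id, callback) - your per-decoder routine
    this.on('work', this.dispatch.bind(this));
}
util.inherits(DecoderPool, EventEmitter);

DecoderPool.prototype.add = function (id) {
    this.queue.push(id);
    this.emit('work');
};

DecoderPool.prototype.dispatch = function () {
    if (this.queue.length === 0) return;
    var port = this.ports.find(function (p) { return p.enable; });
    if (!port) return;                    // every decoder busy; wait for the next 'work'
    var id = this.queue.shift();
    port.enable = false;
    var self = this;
    this.decrypt(port, id, function (result) {
        port.enable = true;
        self.emit('result', id, result);  // listen for decoded values here
        self.emit('work');                // check the queue again
    });
};

// usage (hypothetical):
// var pool = new DecoderPool(ports, decryptOnPort);
// pool.on('result', function (id, value) { console.log(id, value); });
// pool.add(incoming64ByteString);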

How to log contents of HTML5 drag and drop file that is 60MB+ without hanging for minutes?

I have a file that I want to drop on a page and read its contents. It's a CSV with 9 columns. My drop handler reads the file contents like this:
function drop(ev) {
ev.preventDefault();
var data = ev.dataTransfer.files[0];
var fileReader = new FileReader();
fileReader.onload = function (e) {
console.log(fileReader.result)
};
fileReader.onerror = function (e) {
throw 'Error reading CSV file';
};
// Start reading file
fileReader.readAsText(data);
return false;
}
When I drag and drop a small file of a couple of kilobytes or 1MB, I can see the contents of the file in the output. However, with a large CSV file it takes many minutes before anything shows up. Is there a way to stream it somehow so that the page does not look like it is hanging?
With Screw-FileReader
You can get a ReadableStream and do it in a streaming fashion
'use strict'
var blob = new Blob(['111,222,333\naaa,bbb,ccc']) // simulate a file
var stream = blob.stream()
var reader = stream.getReader()
var headerString = ''
var forEachLine = function(row) {
var colums = row.split(',')
// append to DOM
console.log(colums)
}
var pump = function() {
return reader.read().then(function(result) {
var value = result.value
var done = result.done
if (done) {
// Do the last line
headerString && forEachLine(headerString)
return
}
for (var i = 0; i < value.length; i++) {
// Get the character for the current iteration
var char = String.fromCharCode(value[i])
// Check if the char is a new line
if (char.match(/[^\r\n]+/g) !== null) {
// Not a new line so lets append it to
// our header string and keep processing
headerString += char
} else {
// We found a new line character
forEachLine(headerString)
headerString = ''
}
}
return pump()
})
}
pump().then(function() {
console.log('done reading the csv')
})
<script src="https://cdn.rawgit.com/jimmywarting/Screw-FileReader/master/index.js"></script>
If you prefer using the old FileReader, without dependencies, pipes, and transforms:
'use strict'
var blob = new Blob(['111,222,333\naaa,bbb,ccc']) // simulate a file
var fr = new FileReader()
var headerString = ''
var position = 0
var forEachLine = function forEachLine(row) {
var colums = row.split(',')
// append to DOM
console.log(colums)
}
var pump = function pump() {
return new Promise(function(resolve) {
var chunk = blob.slice(position, position + 524288)
position += 524288
fr.onload = function() {
var value = fr.result
var done = position >= blob.size
for (var i = 0; i < value.length; i++) {
var char = value[i]
// Check if the char is a new line
if (char.match(/[^\r\n]+/g) !== null) {
// Not a new line so lets append it to
// our header string and keep processing
headerString += char
} else {
// We found a new line character
forEachLine(headerString)
headerString = ''
}
}
if (done) {
// Send the last line
forEachLine(headerString)
return resolve() // done
}
return resolve(pump())
}
// Read the next chunk
fr.readAsText(chunk)
})
}
pump().then(function() {
console.log('done reading the csv')
})

How to handle modules in an asynchronous app in node.js

for (var i = 0; i < users.length; i++) {
if (users[i].access_token !== undefined) {
var syncer = require('dropsite_server/dbox_sync');
var client = dboxApp.client(users[i].access_token);
var websites_to_sync = [];
syncer.doSync(client, users[i].website, users[i].access_token.uid);
}
}
In syncer.doSync I have some asynchronous function calls, and yes, for performance reasons I want to keep them asynchronous. The issue is that the syncer object is just a reference, so while doSync is working the for loop continues to run, which then changes the variables.
Right now the surrounding for loop has only two elements; the result is that only the last element gets processed, and not just once but actually twice.
The solution would be to make syncer a proper object, but somehow I fail at that. To give a better idea of what the code in syncer looks like, here is the beginning:
/**
* This here is the sync module. User needs to provide. All this module does,
* is sync from a users dropbox to a local path
*/
var dbox = require("dbox");
var fs = require("fs");
var async = require("async");
var allow_downloads = true;
var allow_local_deletes = true;
var client = null;
var saved_sync_data = null;
var sync_data_path = global.config.sync_data_file;
var uid = null;
var remote_sync_dirs = null;
//var sync_data_file = ".dropbox_sync_data";
var errors = [];
var queue = async.queue(doTask, 1);
exports.doSync = function (clientIn, website_domain, _uid) {
client = clientIn;
uid = _uid;
sync_data_path = sync_data_path + "_" + uid;
remote_sync_dirs = website_domain;
async.series(
[
readSyncDataFile,
startSync
]);
}
/**
* Start the Sync
* @param dbox client This is a dbox client
*/
function startSync() {
console.log("get remote delta for website: " + remote_sync_dirs)
getRemoteDelta();
}
var declarations are not scoped to loops and should be made at the top of the file/function:
var syncer = require('dropsite_server/dbox_sync')
, client
, websites_to_sync = [] //what is this used for?
for (var i = 0; i < users.length; ++i) {
if (users[i].access_token !== undefined) {
client = dboxApp.client(users[i].access_token)
syncer.doSync(client, users[i].website, users[i].access_token.uid);
}
}
The reason the last item gets processed twice is that the doSync function sets module level variables that get overwritten each time you call it.
The way to fix this is to pass the variables to the function instead
exports.doSync = function (client, website_domain, uid) {
sync_data_path = sync_data_path + "_" + uid
remote_sync_dirs = website_domain
async.series([
readSyncDataFile.bind(null, client, website_domain, uid)
, startSync.bind(null, client, website_domain, uid)
])
}
function startSync(client, website_domain, uid) {
...
}
function readSyncDataFile(client, website_domain, uid) {
...
}
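One thing to keep in mind with this pattern: async.series invokes each task with a single callback argument, so after the bind the functions receive that callback as their last parameter and must call it for the series to continue, roughly:
function readSyncDataFile(client, website_domain, uid, callback) {
    // ... read the sync data file for this uid ...
    callback(null); // or callback(err) to abort the series
}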

How to return array from JavaScript function that retrieves data from text file?

I am building a Windows 8 Store app with HTML/CSS/JavaScript. I am reading in data from a text file through a function, and then putting that data into an array. I am trying to return the array through the function, but it is not working. Any help would be greatly appreciated. I've attached my code snippet.
// Load user data
var DefineUserData = function LoadUserData() {
return Windows.Storage.ApplicationData.current.localFolder.getFileAsync(loadfile).done(function (UserFile) {
return Windows.Storage.FileIO.readTextAsync(UserFile).done(function (fileResult) {
var userdata = new Object();
var dataobject = {};
var innercount;
var outercount;
var fileResultByLines = fileResult.split("\n");
for (outercount = 0; outercount <= (fileResultByLines.length - 2) ; outercount++) {
var tempArray = fileResultByLines[outercount].split(",");
dataobject.metrictitle = tempArray[0];
dataobject.numinputs = tempArray[1];
dataobject.inputs = new Array();
for (innercount = 0; innercount <= parseInt(dataobject.numinputs) ; innercount++) {
dataobject.inputs[innercount] = tempArray[innercount + 2];
}
userdata[outercount] = dataobject;
}
return userdata;
});
},
function (errorResult) {
document.getElementById("resbutton1").innerText = errorResult;
})
}
Your DefineUserData function is returning a Promise, not a value. Additionally, done functions don't return anything. Instead, you'll need to use then functions in place of the done functions inside DefineUserData, and then add a done (or then) to the code that calls this function.
Also, you can make your promises easier to read and easier to work with by chaining then functions instead of nesting them.
I'm currently on Win7 at the office so I can't test this, but try something similar to this pseudo-code. Note the then functions instead of done; the last then returns your data. A sample snippet follows to illustrate calling this and handling the result.
// modified version of yours
var DefineUserData = function LoadUserData() {
return Windows.Storage.ApplicationData.current.localFolder
.getFileAsync(loadfile)
.then(function (UserFile) {
return Windows.Storage.FileIO.readTextAsync(UserFile);
}).then(function (fileResult) {
var userdata = new Object();
var dataobject = {};
var innercount;
var outercount;
var fileResultByLines = fileResult.split("\n");
for (outercount = 0; outercount <= (fileResultByLines.length - 2) ; outercount++) {
var tempArray = fileResultByLines[outercount].split(",");
dataobject.metrictitle = tempArray[0];
dataobject.numinputs = tempArray[1];
dataobject.inputs = new Array();
for (innercount = 0; innercount <= parseInt(dataobject.numinputs) ; innercount++) {
dataobject.inputs[innercount] = tempArray[innercount + 2];
}
userdata[outercount] = dataobject;
}
return userdata;
},
function (errorResult) {
document.getElementById("resbutton1").innerText = errorResult;
});
}
// some other code...
DefineUserData().done(function (userdata) {
// do something
});
