How to receive data from a Bluetooth device using Node.js - javascript

I am new to JavaScript and Node.js. I am currently working on a medical project. First I will explain my task: I have to receive data from a Bluetooth device (normal BP rate, pulse rate) and display the readings in a web app using Node.js. I don't know how to receive data from the Bluetooth device (a patient monitor machine). Can you suggest some blogs or books to read? Thanks in advance.

You can use "node-bluetooth" to send and receive data from and to a device respectively. This is a sample code:-
const bluetooth = require('node-bluetooth');

// create bluetooth device inquiry instance
const device = new bluetooth.DeviceINQ();

device
  .on('finished', console.log.bind(console, 'finished'))
  .on('found', function found(address, name) {
    console.log('Found: ' + address + ' with name ' + name);

    device.findSerialPortChannel(address, function(channel) {
      console.log('Found RFCOMM channel for serial port on %s: ', name, channel);

      // connect to the remote device on that channel
      bluetooth.connect(address, channel, function(err, connection) {
        if (err) return console.error(err);

        // receive incoming data from the device
        connection.on('data', (buffer) => {
          console.log('received message:', buffer.toString());
        });

        // send data to the device
        connection.write(Buffer.from('Hello!', 'utf-8'));
      });
    });
  })
  .inquire();
It scans for nearby devices, looks up the RFCOMM serial-port channel of each device it finds, connects, and then lets you read and write data over that connection.

Try the noble library. This is how I get information about my Xiaomi Mi Band 3 device:
const arrayBufferToHex = require('array-buffer-to-hex')
const noble = require('noble')

const DEVICE_INFORMATION_SERVICE_UUID = '180a'

noble.on('stateChange', state => {
  console.log(`State changed: ${state}`)
  if (state === 'poweredOn') {
    noble.startScanning()
  }
})

noble.on('discover', peripheral => {
  console.log(`Found device, name: ${peripheral.advertisement.localName}, uuid: ${peripheral.uuid}`)
  if (peripheral.advertisement.localName === 'Mi Band 3') {
    noble.stopScanning()
    peripheral.on('connect', () => console.log('Device connected'))
    peripheral.on('disconnect', () => console.log('Device disconnected'))
    peripheral.connect(error => {
      peripheral.discoverServices([DEVICE_INFORMATION_SERVICE_UUID], (error, services) => {
        console.log(`Found service, name: ${services[0].name}, uuid: ${services[0].uuid}, type: ${services[0].type}`)
        const service = services[0]
        service.discoverCharacteristics(null, (error, characteristics) => {
          characteristics.forEach(characteristic => {
            console.log(`Found characteristic, name: ${characteristic.name}, uuid: ${characteristic.uuid}, type: ${characteristic.type}, properties: ${characteristic.properties.join(',')}`)
          })
          characteristics.forEach(characteristic => {
            if (characteristic.name === 'System ID' || characteristic.name === 'PnP ID') {
              characteristic.read((error, data) => console.log(`${characteristic.name}: 0x${arrayBufferToHex(data)}`))
            } else {
              characteristic.read((error, data) => console.log(`${characteristic.name}: ${data.toString('ascii')}`))
            }
          })
        })
      })
    })
  }
})
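Since the question is specifically about pulse and blood-pressure readings: if the patient monitor exposes the standard GATT Heart Rate service, you can subscribe to its measurement characteristic with noble instead of reading one-off values. A minimal sketch, assuming the monitor advertises the standard Heart Rate service (UUID 180d) and Heart Rate Measurement characteristic (UUID 2a37); the UUIDs and flag parsing follow the Bluetooth SIG spec, not anything confirmed about the asker's device:

const noble = require('noble')

const HEART_RATE_SERVICE_UUID = '180d'       // standard GATT Heart Rate service
const HEART_RATE_MEASUREMENT_UUID = '2a37'   // Heart Rate Measurement characteristic

noble.on('stateChange', state => {
  if (state === 'poweredOn') noble.startScanning([HEART_RATE_SERVICE_UUID])
})

noble.on('discover', peripheral => {
  noble.stopScanning()
  peripheral.connect(() => {
    peripheral.discoverSomeServicesAndCharacteristics(
      [HEART_RATE_SERVICE_UUID],
      [HEART_RATE_MEASUREMENT_UUID],
      (error, services, characteristics) => {
        if (error) return console.error(error)
        const measurement = characteristics[0]
        // the device pushes a new buffer every time it takes a reading
        measurement.on('data', data => {
          // first byte is a flags field; bit 0 says whether the value is 8- or 16-bit
          const is16Bit = data[0] & 0x01
          const bpm = is16Bit ? data.readUInt16LE(1) : data.readUInt8(1)
          console.log('Pulse:', bpm, 'bpm')
        })
        measurement.subscribe(err => {
          if (err) console.error(err)
        })
      }
    )
  })
})

A monitor that implements the standard Blood Pressure service works the same way, with service UUID 1810 and characteristic UUID 2a35.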

You can use node-ble, a Node.js library that leverages D-Bus and avoids C++ bindings.
https://github.com/chrvadala/node-ble
Here is a basic example:
const { createBluetooth } = require('node-ble')

// TEST_DEVICE, TEST_SERVICE, TEST_CHARACTERISTIC, TEST_NOTIFY_SERVICE and
// TEST_NOTIFY_CHARACTERISTIC hold your device's MAC address and GATT UUIDs

async function main () {
  const { bluetooth, destroy } = createBluetooth()

  // get bluetooth adapter
  const adapter = await bluetooth.defaultAdapter()
  await adapter.startDiscovery()
  console.log('discovering')

  // get device and connect
  const device = await adapter.waitDevice(TEST_DEVICE)
  console.log('got device', await device.getAddress(), await device.getName())
  await device.connect()
  console.log('connected')
  const gattServer = await device.gatt()

  // read/write characteristic
  const service1 = await gattServer.getPrimaryService(TEST_SERVICE)
  const characteristic1 = await service1.getCharacteristic(TEST_CHARACTERISTIC)
  await characteristic1.writeValue(Buffer.from('Hello world'))
  const buffer = await characteristic1.readValue()
  console.log('read', buffer, buffer.toString())

  // subscribe to characteristic notifications
  const service2 = await gattServer.getPrimaryService(TEST_NOTIFY_SERVICE)
  const characteristic2 = await service2.getCharacteristic(TEST_NOTIFY_CHARACTERISTIC)
  await characteristic2.startNotifications()
  await new Promise(done => {
    characteristic2.once('valuechanged', buffer => {
      console.log('subscription', buffer)
      done()
    })
  })
  await characteristic2.stopNotifications()

  destroy()
}

main()
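Applied to the original question, the same node-ble calls can be pointed at the standard GATT Blood Pressure service. A minimal sketch, assuming the monitor implements the standard Blood Pressure service (UUID 00001810-...) with the Blood Pressure Measurement characteristic (UUID 00002a35-...), and that DEVICE_ADDRESS is a placeholder for the monitor's MAC address; real devices encode the values as IEEE-11073 SFLOATs, which is only roughly handled here:

const { createBluetooth } = require('node-ble')

const DEVICE_ADDRESS = 'XX:XX:XX:XX:XX:XX' // placeholder: your monitor's MAC address
const BLOOD_PRESSURE_SERVICE = '00001810-0000-1000-8000-00805f9b34fb'
const BLOOD_PRESSURE_MEASUREMENT = '00002a35-0000-1000-8000-00805f9b34fb'

async function readBloodPressure () {
  const { bluetooth } = createBluetooth()
  const adapter = await bluetooth.defaultAdapter()
  await adapter.startDiscovery()

  const device = await adapter.waitDevice(DEVICE_ADDRESS)
  await device.connect()
  const gatt = await device.gatt()

  const service = await gatt.getPrimaryService(BLOOD_PRESSURE_SERVICE)
  const characteristic = await service.getCharacteristic(BLOOD_PRESSURE_MEASUREMENT)

  characteristic.on('valuechanged', buffer => {
    // per the spec: a flags byte, then systolic and diastolic as 16-bit SFLOATs;
    // treated here as plain uint16, which matches devices that send an exponent of 0
    const systolic = buffer.readUInt16LE(1)
    const diastolic = buffer.readUInt16LE(3)
    console.log(`BP reading: ${systolic}/${diastolic}`)
  })
  await characteristic.startNotifications()
}

readBloodPressure().catch(console.error)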

Related

Node / React - How to send data only to specific users using sse?

I want to send notifications to all and only those users who follow certain items. When a user reports an item, this triggers a report notification to all followers of the item being reported. What happens in my case:
Let's say 3 people follow item xyz. I send the notification 'xyz was reported' to the 3 users; however, each user gets that notification 3 times instead of each of the 3 users getting it once.
Do I need to establish a connection with each user separately by using their userId in the SSE event? Or is there another, better solution?
My backend:
const SSE = require("express-sse");
const sse = new SSE(["test"], { isSerialized: false, initialEvent: 'initialize sse' });
...

reportItem: async (req, res) => {
  // ...report item
  await NotificationService.notifyOfItemUpdate(id, report.data);
  // ...
}

const NotificationService = {
  notifyOfItemUpdate: async (updatedItemId, report) => {
    const item = await Item.findById({ _id: updatedItemId });
    const message = NotificationService.createItemUpdateMessage(item, report);
    const followers = await User.find({ following: updatedItemId });
    await NotificationService.notify(followers, message);
  },
  notify: async (followers, message) => {
    return Promise.all(followers.map(async (follower) => {
      const notification = await NotificationService.createNotification(follower._id, message);
      if (follower._id.toString() === notification.to.toString()) {
        sse.send(notification.messages[0], 'new_notification'); // [0] as sending the last message
      }
    }));
  }
}
Client:

const eventSource = new EventSource(url, {
  headers: {
    Authorization: {
      toString: function () {
        return "Bearer " + token;
      },
    },
  },
});

eventSource.addEventListener('open', (e) => {
  console.log('SSE opened!');
});

eventSource.addEventListener(`new_notification${}`, (e) => {
  const notifications = JSON.parse(e.data);
  dispatch({ type: 'STREAM_NOTIFICATIONS_SUCCESS', payload: notifications })
});

eventSource.addEventListener('error', (e) => {
  console.error('Error: ', e);
});

return () => {
  eventSource.close();
};
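One common way to do what the `new_notification${}` placeholder hints at is to namespace the event per user, so each client only listens for its own event name. A minimal sketch, assuming the client knows its own userId (e.g. decoded from the auth token) and using express-sse's send(content, eventName) form; the underscore-joined event name is my own convention, not something from the question:

// server: emit a per-user event name for each follower
notify: async (followers, message) => {
  return Promise.all(followers.map(async (follower) => {
    const notification = await NotificationService.createNotification(follower._id, message);
    sse.send(notification.messages[0], `new_notification_${follower._id}`);
  }));
}

// client: listen only for this user's events
eventSource.addEventListener(`new_notification_${userId}`, (e) => {
  const notifications = JSON.parse(e.data);
  dispatch({ type: 'STREAM_NOTIFICATIONS_SUCCESS', payload: notifications });
});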

Broadcasting to all clients with Deno websocket

I want to add notifications to an application I've developed.
Unfortunately, Deno has removed the std ws module (https://deno.land/std@0.110.0/ws/mod.ts).
That's why I'm using the WebSocket support built into Deno itself. Since it doesn't have many functions, I have to add some things myself, for example sending a message to all open clients.
What I want to do: when the PDF is created, a (data, message) comes over the socket and the notifications on the page are updated according to the incoming data.
I keep all open clients in a Map, and when the PDF is created I iterate over this Map and send (data, message) to every socket.
However, this only works once.
Server config:
import {
  path,
  paths,
  ctid,
} from "../deps.ts";

const users = new Map();
const sockets = new Map();
const userArr = [];

export const startNotif = (socket, req) => {
  const claims = req.get("claims");
  const org = req.get("org");
  claims.org = org;
  console.log("connected");

  users.set(claims.sub, { "username": claims.sub, "socket": socket });
  users.forEach((user) => {
    if (userArr.length === 0) {
      userArr.push(user);
    } else if (userArr.every((w) => w.username !== user.username)) {
      userArr.push(user);
    }
  });
  sockets.set(org, userArr);

  function broadcastMessage(message) {
    sockets.get(org).map((u) => {
      console.log(u.socket.readyState);
      u.socket.send(message);
    });
  }

  if (socket.readyState === 3) {
    sockets.delete(uid);
    return;
  }

  const init = (msg) => {
    socket.send(
      JSON.stringify({
        status: "creating",
      })
    );
  };

  const ondata = async (msg) => {
    const upfilepath = path.join(paths.work, `CT_${msg.sid}_report.pdf`);
    try {
      const s = await Deno.readTextFile(upfilepath);
      if (s) {
        socket.send(
          JSON.stringify({
            status: "end",
          })
        );
      } else {
        socket.send(
          JSON.stringify({
            status: "creating",
          })
        );
      }
    } catch (e) {
      if (e instanceof Deno.errors.NotFound)
        console.error('file does not exist');
    }
  };

  const end = () => {
    try {
      const endTime = Date.now();
      const msg = "Your PDF has been created";
      const id = ctid(12); // create random id
      broadcastMessage(
        JSON.stringify({
          id: id,
          date: endTime,
          status: "done",
          message: msg,
          read: 'negative',
          action: 'pdf'
        })
      );
    } catch (e) {
      console.log(400, "Cannot send.", e);
    }
  };

  socket.onmessage = async (e) => {
    const cmd = JSON.parse(e.data);
    if (cmd.bid === 'start') {
      await init(cmd);
    }
    if (!cmd.bid && cmd.sid) {
      await ondata(cmd);
    }
    if (cmd.bid === 'end') {
      await end();
    }
  };

  socket.onerror = (e) => {
    console.log(e);
  };
};
Client config:
export const webSocketHandler = (request) =>
  new Promise((res, rej) => {
    let url;
    if (!process.env.NODE_ENV || process.env.NODE_ENV === 'development') {
      url = `http://localhost:8080/api/notifications/ws`.replace('http', 'ws');
    } else {
      url = `${window.location.origin}/api/notifications/ws`.replace('http', 'ws');
    }
    const token = JSON.parse(sessionStorage.getItem('token'));
    const orgname = localStorage.getItem('orgname');
    const protocol = `${token}_org_${orgname}`;
    const socket = new WebSocket(url, protocol);
    const response = Object.create({});

    socket.onopen = function () {
      socket.send(
        JSON.stringify({
          bid: 'start',
        })
      );
    };

    socket.onmessage = function (event) {
      response.data = JSON.parse(event.data);
      if (response.data.status === 'creating') {
        socket.send(
          JSON.stringify({
            sid: request.sid,
          })
        );
      } else if (response.data.status === 'end') {
        socket.send(
          JSON.stringify({
            bid: 'end',
          })
        );
      } else if (response.data.status === 'done') {
        try {
          res(response);
        } catch (err) {
          rej(err);
        }
      }
    };

    socket.onclose = function (event) {
      response.state = event.returnValue;
    };

    socket.onerror = function (error) {
      rej(error);
    };
  });
The onClick handler of the button I use in the component:
const donwloadReport = async (type) => {
  const query = `?sid=${sid}&reportType=${type}`;
  const fileName = `CT_${sid}_report.${type}`;
  try {
    type === 'pdf' && setLoading(true);
    const response = await getScanReportAction(query);
    const request = {
      sid,
    };
    webSocketHandler(request)
      .then((data) => {
        console.log(data);
        dispatch({
          type: 'update',
          data: {
            id: data.data.id,
            date: data.data.date,
            message: data.data.message,
            action: data.data.action,
            read: data.data.read,
          },
        });
      })
      .catch((err) => {
        console.log(err);
      });
    if (type === 'html') {
      downloadText(response.data, fileName);
    } else {
      const blobUrl = await readStream(response.data);
      setLoading(false);
      downloadURL(blobUrl, fileName);
    }
  } catch (err) {
    displayMessage(err.message);
  }
};
Everything works perfectly the first time: when I press the download button for the PDF, the socket works, data is returned, and I update the notification count via context based on that data.
Later I realized that this only works in a single tab. When I open a new client in another tab, its notification count does not increase. To handle this, I wanted to keep all sockets in a Map, iterate over them, and send a message to each socket separately. But in that case, when I press the download button a second time, no data comes from the socket.
I suspect I should do the socket initialization on the client inside the context, but when I do that, the socket gets started twice for no reason.
In summary, consider an application with organizations and users belonging to those organizations. If the clients of users A, B and C belonging to organization X are open at the same time and user A presses the PDF download button, I want users A, B and C to be notified when the PDF is ready.
I would be very grateful if someone could show me a way around this issue.
Have you looked at the BroadcastChannel API? Maybe that could solve your issue. See for example:
Deno specific: https://medium.com/deno-the-complete-reference/broadcast-channel-in-deno-f76a0b8893f5
Web/Browser API: https://developer.mozilla.org/en-US/docs/Web/API/Broadcast_Channel_API
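For what it's worth, a minimal sketch of that idea, assuming Deno's BroadcastChannel (stable on Deno Deploy, behind an unstable flag in the local CLI) and the per-organization grouping from the question; the channel name, function names and message shape are my own placeholders rather than anything from the original code:

// the channel name "notifications" and the message shape are placeholders
const socketsByOrg = new Map();
const channel = new BroadcastChannel("notifications");

// called from the websocket route for every client that connects
export function registerSocket(socket, org) {
  if (!socketsByOrg.has(org)) socketsByOrg.set(org, new Set());
  socketsByOrg.get(org).add(socket);
  socket.onclose = () => socketsByOrg.get(org).delete(socket);
}

// called once, wherever the PDF job finishes
export function publishPdfDone(org, message) {
  // a dedicated publisher channel so the message also reaches listeners in this same isolate
  const publisher = new BroadcastChannel("notifications");
  publisher.postMessage({ org, message });
  publisher.close();
}

// every instance forwards published events to its own open sockets for that org
channel.onmessage = (event) => {
  const { org, message } = event.data;
  const payload = JSON.stringify(message);
  for (const socket of socketsByOrg.get(org) ?? []) {
    if (socket.readyState === WebSocket.OPEN) {
      socket.send(payload);
    }
  }
};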

DynamoDB update does not console.log any output

I have the following code. It is supposed to receive an SQS message, read the body, then update a DynamoDB record with the information contained in that body. The update is not working, which is one issue, but even stranger, I'm not getting any output from the DynamoDB update. The last line of output is the console.log that details the SQS message, then the function ends.
How is this possible? Shouldn't DynamoDB return some kind of output?
console.log('Loading function');
const util = require('util')
const AWS = require('aws-sdk');
var documentClient = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
  //console.log('Received event:', JSON.stringify(event, null, 2));
  for (const { messageId, body } of event.Records) {
    //const { body } = event.Records[0];
    //console.log(body)
    console.log('SQS message %s: %j', messageId, body);
    const JSONBody = JSON.parse(body)
    //const message = JSON.parse(test["Message"]);
    const id = JSONBody.id;
    const city = JSONBody.City;
    const address = JSONBody.Address;
    const params = {
      TableName: 'myTable',
      Key: {
        ID: ':id',
      },
      UpdateExpression: 'set address = :address',
      ExpressionAttributeValues: {
        ':id': id,
        ':address': address,
        ':sortKey': "null"
      }
      //ReturnValues: "UPDATED_NEW"
    };
    documentClient.update(params, function(err, data) {
      if (err) console.log(err);
      else console.log(data);
    });
  }
  return `Successfully processed ${event.Records.length} messages.`;
};
There are a couple of ways to do this, but I'm not sure about your use case: Are the operations critical? Do failed items need to be handled? Does performance need to be boosted for a large dataset? etc.
// I don't recommend this implementation
const { DynamoDB } = require('aws-sdk');
const documentClient = new DynamoDB.DocumentClient();

exports.handler = async (event) => {
  for (const { messageId, body } of event.Records) {
    console.log('SQS message %s: %j', messageId, body);
    // Parsing JSON is dangerous without knowing the structure;
    // remember to handle the case where an error occurs
    const JSONBody = JSON.parse(body)
    const id = JSONBody.id;
    const address = JSONBody.Address;
    const params = {
      TableName: 'myTable',
      // the key must hold the actual id value, not a ':id' placeholder
      Key: {
        ID: id,
      },
      UpdateExpression: 'set address = :address',
      // only values referenced in the UpdateExpression belong here
      ExpressionAttributeValues: {
        ':address': address,
      },
      ReturnValues: "UPDATED_NEW"
    };
    // Wait for each update operation to finish;
    // IO time will be extended
    await documentClient.update(params)
      .promise()
      .then(res => {
        console.log(res)
      })
      .catch(err => {
        console.error(err);
      })
  }
  // Even if an update operation failed, this message is still returned by the lambda handler
  return `Successfully processed ${event.Records.length} messages.`;
};
// My recommended way
const AWS = require('aws-sdk');
const documentClient = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
  // All the update operations are fired nearly concurrently,
  // so IO time will be reduced
  return Promise.all(event.Records.map(({ messageId, body }) => {
    console.log('SQS message %s: %j', messageId, body);
    // Parsing JSON is dangerous without knowing the structure;
    // remember to handle the case where an error occurs
    const JSONBody = JSON.parse(body)
    const id = JSONBody.id;
    const address = JSONBody.Address;
    const params = {
      TableName: 'myTable',
      // the key must hold the actual id value, not a ':id' placeholder
      Key: {
        ID: id,
      },
      UpdateExpression: 'set address = :address',
      // only values referenced in the UpdateExpression belong here
      ExpressionAttributeValues: {
        ':address': address,
      },
      ReturnValues: "UPDATED_NEW"
    };
    return documentClient.update(params)
      .promise()
      .then(res => {
        console.log(res)
      })
  }))
    // When the lambda handler finishes all the updates, it returns a string
    .then(() => {
      return `Successfully processed ${event.Records.length} messages.`
    })
    // If any update operation fails, the remaining results are discarded
    // and the lambda handler returns undefined
    .catch(error => {
      console.error(error);
      // return some error for the lambda response
    })
};
P.S. My two cents: before you do any kind of Lambda development with the Node.js runtime, you should understand the differences between callbacks, promises, and async/await in JavaScript.
Fixed it by making the handler synchronous, i.e. removed async from the function definition.

Transaction numbers are only allowed on storage engines that support document-level locking - MongodbMemoryServer/Mochai/Chai/Supertest

FIXED: USE storageEngine: "wiredTiger"
I use Mocha / Chai / Supertest and Mongodb-Memory-Server to test my app, but I received the error: Transaction numbers are only allowed on storage engines that support document-level locking
With a real database, testing via Postman, it works well.
My code:
In database.js:
const mongoose = require('mongoose')
const { MongoMemoryReplSet } = require('mongodb-memory-server')

mongoose.set('useFindAndModify', false);

const connect = async () => {
  try {
    let url = process.env.MONGO_URL
    let options = {
      //Something
    }
    if (process.env.NODE_ENV === 'test') {
      const replSet = new MongoMemoryReplSet();
      await replSet.waitUntilRunning();
      const uri = await replSet.getUri();
      await mongoose.connect(uri, options)
      //log connected
    } else {
      await mongoose.connect(url, options)
      //log connected
    }
  } catch (error) {
    //error
  }
}
I have two models: Company and User. I made a function to add a member to a company using a transaction. My code:
const addMember = async (req, res, next) => {
  const { companyId } = req.params
  const { userId } = req.body
  const session = await mongoose.startSession()
  try {
    await session.withTransaction(async () => {
      const [company, user] = await Promise.all([
        Company.findOneAndUpdate(
          //Something
        ).session(session),
        User.findByIdAndUpdate(
          //Something
        ).session(session)
      ])
      //Something if... else
      return res.json({
        message: `Add member successfully!`,
      })
    })
  } catch (error) {
    //error
  }
}
Here's the router:
router.post('/:companyId/add-member',
  authentication.required,
  company.addMember
)
Test file:
const expect = require('chai').expect
const request = require('supertest')
const app = require('../app')

describe('POST /company/:companyId/add-member', () => {
  it('OK, add member', done => {
    request(app).post(`/company/${companyIdEdited}/add-member`)
      .set({ "x-access-token": signedUserTokenKey })
      .send({ userId: memberId })
      .then(res => {
        console.log(res.body)
        expect(res.statusCode).to.equals(200)
        done()
      })
      .catch((error) => done(error))
  })
})
And I received the error: Transaction numbers are only allowed on storage engines that support document-level locking
How can I fix this?
Add retryWrites=false to your database URI. Example below:
mongodb://xx:xx@xyz.com:PORT,zz.com:33427/database-name?replicaSet=rs-xx&ssl=true&retryWrites=false
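For reference, the fix the asker notes at the top (USE storageEngine: "wiredTiger") corresponds to configuring the in-memory replica set with the wiredTiger storage engine instead of the default ephemeral one, since only wiredTiger supports the document-level locking that transactions need. A minimal sketch of the test branch of database.js, assuming a mongodb-memory-server version whose MongoMemoryReplSet accepts a replSet.storageEngine option (option names vary slightly between major versions):

const mongoose = require('mongoose')
const { MongoMemoryReplSet } = require('mongodb-memory-server')

const connectForTests = async (options = {}) => {
  // run a single-node replica set on wiredTiger so transactions are supported
  const replSet = new MongoMemoryReplSet({
    replSet: { count: 1, storageEngine: 'wiredTiger' },
  });
  await replSet.waitUntilRunning();
  const uri = await replSet.getUri();
  await mongoose.connect(uri, options);
  return replSet; // keep a handle so the replica set can be stopped after the tests
}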

BINANCE API - How to get Account info with User Data Stream

I'm using Node and the ws npm package to work with WebSockets. I got the listenKey as stated in the docs (below), but I'm unable to get my account info using the User Data Stream. I'd prefer to use a stream to read my most current account info (balances, etc.) since using the REST API to do it incurs a penalty (WEIGHT: 5) each time.
I've tried doing ws.send('outboundAccountInfo') but no joy.
DOCS: https://github.com/binance-exchange/binance-official-api-docs/blob/master/user-data-stream.md
Full code example - does not return any data:
import request from 'request'
import WebSocket from 'ws'
import { API_KEY } from '../../assets/secrets'

const DATA_STREAM_ENDPOINT = 'wss://stream.binance.com:9443/ws'
const BINANCE_API_ROOT = 'https://api.binance.com'
const LISTEN_KEY_ENDPOINT = `${BINANCE_API_ROOT}/api/v1/userDataStream`

const fetchAccountWebsocketData = async () => {
  const listenKey = await fetchListenKey()
  console.log('-> ', listenKey) // valid key is returned

  let ws
  try {
    ws = await openWebSocket(`${DATA_STREAM_ENDPOINT}/${listenKey}`)
  } catch (err) {
    throw (`ERROR - fetchAccountWebsocketData: ${err}`)
  }

  // Nothing returns from either
  ws.on('message', data => console.log(data))
  ws.on('outboundAccountInfo', accountData => console.log(accountData))
}

const openWebSocket = endpoint => {
  const p = new Promise((resolve, reject) => {
    const ws = new WebSocket(endpoint)
    console.log('\n-->> New Account Websocket')
    ws.on('open', () => {
      console.log('\n-->> Websocket Account open...')
      resolve(ws)
    }, err => {
      console.log('fetchAccountWebsocketData error:', err)
      reject(err)
    })
  })
  p.catch(err => console.log(`ERROR - fetchAccountWebsocketData: ${err}`))
  return p
}

const fetchListenKey = () => {
  const p = new Promise((resolve, reject) => {
    const options = {
      url: LISTEN_KEY_ENDPOINT,
      headers: { 'X-MBX-APIKEY': API_KEY }
    }
    request.post(options, (err, httpResponse, body) => {
      if (err)
        return reject(err)
      resolve(JSON.parse(body).listenKey)
    })
  })
  p.catch(err => console.log(`ERROR - fetchListenKey: ${err}`))
  return p
}

export default fetchAccountWebsocketData
I was struggling too... for hours!
https://www.reddit.com/r/BinanceExchange/comments/a902cq/user_data_streams_has_anyone_used_it_successfully/
The Binance user data stream doesn't return anything when you connect to it, only when something changes in your account. Try running your code, then go to Binance and place an order in the book; you should see some data show up.
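Two details worth noting about the code above, as a hedged sketch rather than a confirmed fix: the ws library only emits generic events such as 'message', so ws.on('outboundAccountInfo', ...) never fires; the Binance event type instead arrives inside the JSON payload's "e" field once something actually changes in the account. The event and field names below are taken from the user data stream docs linked in the question:

ws.on('message', raw => {
  const msg = JSON.parse(raw)

  // Binance tags every user-data-stream payload with an event type in "e"
  switch (msg.e) {
    case 'outboundAccountInfo':   // account update; "B" holds the balances array
      console.log('Account update:', msg.B)
      break
    case 'executionReport':       // order update; "s" is the symbol, "X" the order status
      console.log('Order update:', msg.s, msg.X)
      break
    default:
      console.log('Other event:', msg.e)
  }
})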
