How can I connect a MultiChain node with my app? (JavaScript)

For a university project I must create an app based on MultiChain, using the blockchain as a database and storing information in its blocks.
I'm trying to use Meteor and the JSON-RPC API (https://github.com/scoin/multichain-node) but I can't connect to the node. Could someone help me, or suggest an alternative to Meteor?
I installed multichain-node with
npm install multichain-node --save
this created multichain-node folder in node_modules.
In my main.js i'm trying to connect with the node (that is running in Terminal)
import './main.html';

console.log("b4 connection");

// RPC connection settings for the local MultiChain node.
// Port and credentials must match the node's multichain.conf.
const connection = {
  port: 6744,
  host: '127.0.0.1',
  user: "multichainrpc",
  pass: "5zGVBTY7nVsnEmp3vbGq8LTbmnmjueYkiTLc5pRzE7xh"
}

// BUG FIX: require the package by name — reaching into node_modules with a
// relative path breaks as soon as the file moves or dependencies are hoisted.
const multichain = require("multichain-node")(connection);
console.log("info");
// Poll the wallet every 5 s until `txid` has at least one confirmation.
// Resolves once confirmed; rejects (and stops polling) on lookup error.
let listenForConfirmations = (txid) => {
  console.log("WAITING FOR CONFIRMATIONS")
  return new Promise((resolve, reject) => {
    var interval = setInterval(() => {
      getConfirmations(txid)
        .then(confirmations => {
          if (confirmations > 0) {
            clearInterval(interval);
            return resolve()
          }
        })
        .catch(err => {
          // BUG FIX: the original rejected without clearing the timer, so
          // polling continued forever after the promise was already settled.
          clearInterval(interval);
          return reject(err);
        })
    }, 5000)
  })
}
// Look up the wallet transaction for `txid` and report how many
// confirmations it currently has.
let getConfirmations = async (txid) => {
  const { confirmations } = await multichain.getWalletTransaction({ txid });
  return confirmations;
}
// Smoke test: ask the node for its info block and print the result.
let startTests = () => {
  console.log("Running Tests")
  console.log("TEST: GET INFO")
  multichain.getInfo((err, res) => {
    // BUG FIX: the original ignored `err`, so a failed RPC connection
    // produced no output at all — exactly the symptom being debugged.
    if (err) {
      console.error("getInfo failed:", err);
      return;
    }
    console.log(res);
  })
}

startTests()
This is the error in Chrome console:

Related

SlashCommands not showing up

I just finished my bot and wanted to invite it to another server to test it out.
However, when I typed / no commands showed up.
When I invited the bot I enabled application.commands so I can use the slashcommands but it still did not work. My bot also has a global slashcommand handler so it should normally work right?
I don't know if the handler code is needed but I'll still add it here in case you do need it:
const { Perms } = require('../Validation/Permissions');
const { Client } = require('discord.js');
/**
* #param {Client} client
*/
module.exports = async (client, PG, Ascii) => {
const Table = new Ascii("Command Loaded");
CommandsArray = [];
(await PG(`${process.cwd()}/Commands/*/*.js`)).map(async (file) => {
const command = require(file);
if(!command.name)
return Table.addRow(file.split("/")[7], "⛔ FAILED", "Missing a name.")
if(command.type !== "USER" && !command.description)
return Table.addRow(command.name, "⛔ FAILED", "Missing a description.")
if(command.permission){
if(Perms.includes(command.permission))
command.defaultPermission = false;
else
return Table.addRow(command.name, "⛔ FAILED", "Permission is invalid.")
}
client.commands.set(command.name, command);
CommandsArray.push(command);
await Table.addRow(command.name, "✅ SUCCESSFUL");
});
console.log(Table.toString());
// PERMISSIONS CHECK //
client.on("ready", async () =>{
client.guilds.cache.forEach((g) => {
g.commands.set(CommandsArray).then(async (command) =>{
const Roles = (commandName) => {
const cmdPerms = CommandsArray.find((c) => c.name === commandName).permission;
if(!cmdPerms) return null;
return g.roles.cache.filter((r) => r.permissions.has(cmdPerms) && !r.managed).first(10);
}
const fullPermissions = command.reduce((accumulator, r) =>{
const roles = Roles(r.name);
if(!roles) return accumulator;
const permissions = roles.reduce((a, r) =>{
return [...a, {id: r.id, type: "ROLE", permission:true}]
}, []);
return [...accumulator, {id: r.id, permissions}]
}, []);
await g.commands.permissions.set({ fullPermissions });
});
})
});
}
You have to register the command first, which is essentially creating the command within Discord. Follow the guide to get it set up. And to clarify, you only register the command once, if that isn't obvious.
You can use Postman to create/edit the commands if you are comfortable with it.

JestJS - Multiple websocket connections hangs Jest

I'm using this function to test my server: it creates the given number of websocket connections and checks when my game starts. However, no matter what timeout I assign, it hangs in Jest. In the browser — Firefox, Edge Chromium — it works perfectly fine.
/**
 * Opens `numberOfBots` WebSocket connections to the local game server and
 * resolves with `true` once every client has received a "gameStarted"
 * message. Rejects on the first socket error.
 */
function checkGameStart(numberOfBots) {
  return new Promise((resolve, reject) => {
    let clients = [];
    let connection = [];

    for (let i = 0; i < numberOfBots; i++) {
      clients.push(new WebSocket('ws://127.0.0.1:8080'));
      connection.push(false);

      clients[i].onmessage = (msg) => {
        let data = JSON.parse(msg.data);
        if (data.title === "gameStarted") {
          connection[i] = true;
          checkAllClientsReady();
        }
      }
      clients[i].onerror = (err) => reject(err);
    }

    function checkAllClientsReady() {
      if (!(connection.includes(false))) {
        resolve(true);
        closeAllConnections();
      }
    }

    function closeAllConnections() {
      // BUG FIX: the original compared the index to the array itself
      // (`i < clients`), which is always false, so no socket was ever
      // closed — the open sockets keep the process alive and Jest hangs.
      for (let i = 0; i < clients.length; i++) {
        clients[i].close()
      }
    }
  });
}
Does anyone know why this happens and what I can do to make sure it doesn't happen again?
Test code;
// Starting four bots should result in a started game.
test('Check the game starts', async () => {
  await expect(checkGameStart(4)).resolves.toBe(true);
});
I refactored your code a little bit and added a WebSocket server using ws NPM package on the test setup:
const { WebSocketServer } = require('ws')

const port = 8080
const wss = new WebSocketServer({ port })

// Greet every connecting client with the "gameStarted" event right away.
beforeAll(() => {
  wss.on('connection', (socket) => {
    socket.send(JSON.stringify({ title: 'gameStarted' }))
  })
})

// Shut the server down so Jest can exit cleanly.
afterAll(() => {
  wss.close()
})
// Connects `numberOfBots` clients and resolves to true once each of them
// has seen the "gameStarted" message; any socket error rejects.
async function checkGameStart(numberOfBots) {
  const pending = []
  for (let bot = 0; bot < numberOfBots; bot++) {
    pending.push(new Promise((resolve, reject) => {
      const ws = new WebSocket(`ws://localhost:${port}`)
      ws.onmessage = (event) => {
        const payload = JSON.parse(event.data)
        if (payload.title === 'gameStarted') {
          ws.close()
          resolve()
        }
      }
      ws.onerror = (err) => {
        ws.close()
        reject(err)
      }
    }))
  }
  await Promise.all(pending)
  return true
}
// Jest awaits the returned promise, so the explicit async wrapper
// is unnecessary here.
test('Check the game starts', () => {
  return expect(checkGameStart(4)).resolves.toBe(true);
});
$ npx jest
PASS ./websocket.test.js
✓ Check the game starts (64 ms)
Test Suites: 1 passed, 1 total
Tests: 1 passed, 1 total
Snapshots: 0 total
Time: 0.708 s, estimated 1 s
Ran all test suites.
This only works if Jest is also configured to use a jsdom test environment:
// jest.config.js
// jsdom supplies browser globals (including the WebSocket constructor)
// inside Jest; the default "node" environment does not have WebSocket.
module.exports = {
testEnvironment: "jsdom",
};
Otherwise, the WebSocket constructor will be undefined since it is only available on web browser environments and, by default, Jest runs in node environment.

What am I missing here to get data out of this spawned Node.js child process?

I'm trying to use a spawned command-line lzip process to expand an lzipped data stream, as I haven't found any good native JavaScript tools to do the job.
I can get this to work using files and file descriptors, but it seems stupid to have to write out, and read back in, a bunch of temporary scratch files. I want to do all of the work I can in memory.
So here's the code I'm trying to use:
import { requestBinary } from 'by-request';
import { spawn } from 'child_process';
import { min } from '#tubular/math';
// Downloads an .lz archive and tries to decompress it in-memory through a
// spawned `lzip -d` child process, collecting the expanded tar in a Buffer.
// NOTE(review): as described below, the stdout 'data' handler never fires —
// presumably the child's output/backpressure is not drained the way this
// write-then-end sequence expects; the accepted fix pipes streams instead.
// TODO confirm against the working version further down.
export async function tarLzToZip(url: string): Promise<void> {
const lzData = await requestBinary(url, { headers: { 'User-Agent': 'curl/7.64.1' } });
// stdin/stdout are piped; stderr passes straight through to this process.
const lzipProc = spawn('lzip', ['-d'], { stdio: ['pipe', 'pipe', process.stderr] });
let tarContent = Buffer.alloc(0);
// Accumulate decompressed output as it arrives.
lzipProc.stdout.on('data', data => {
tarContent = Buffer.concat([tarContent, data], tarContent.length + data.length);
});
// Feed the compressed payload to lzip in 4 KB chunks, awaiting each write.
for (let offset = 0; offset < lzData.length; offset += 4096) {
await new Promise<void>((resolve, reject) => {
lzipProc.stdin.write(lzData.slice(offset, min(offset + 4096, lzData.length)), err => {
if (err)
reject(err);
else
resolve();
});
});
}
// Close stdin so lzip knows the input is complete.
await new Promise<void>((resolve, reject) => {
lzipProc.stdin.end((err: any) => {
if (err)
reject(err);
else
resolve();
});
});
// NOTE(review): logged before the child has exited, so output may still be
// pending at this point.
console.log('data length:', tarContent.length);
}
When I step through with a debugger everything seems to be going well with the sending data into lzipProc.stdin. (I've tried doing both chunks like this, and all data in one go.) lzipProc.stdout.on('data', data =>, however, never gets called. When I get to the end, tarContent is empty.
What's missing here? Do I need a different stdio config? Are there different stream objects I should be using? Do I need to sacrifice more goats under the light of a full moon?
UPDATE
My solution based on Matt's excellent answer posted below, with all of the particulars for my use case:
import archiver from 'archiver';
import fs, { ReadStream } from 'fs';
import fsp from 'fs/promises';
import needle from 'needle';
import path from 'path';
import { spawn } from 'child_process';
import tar from 'tar-stream';
// Base URL for official IANA time-zone release archives.
const baseUrl = 'https://data.iana.org/time-zones/releases/';
// Fetch the combined tzdb (code + data) release as a zip read stream.
export async function codeAndDataToZip(version: string): Promise<ReadStream> {
return compressedTarToZip(`${baseUrl}tzdb-${version}.tar.lz`);
}
// Fetch the tzcode-only release as a zip read stream.
export async function codeToZip(version: string): Promise<ReadStream> {
return compressedTarToZip(`${baseUrl}tzcode${version}.tar.gz`);
}
// Fetch the tzdata-only release as a zip read stream.
export async function dataToZip(version: string): Promise<ReadStream> {
return compressedTarToZip(`${baseUrl}tzdata${version}.tar.gz`);
}
// Downloads a compressed tar (.tar.lz or .tar.gz), converts it to a zip in a
// local cache directory, and resolves with a read stream over that zip.
// Cached results are served without re-downloading.
async function compressedTarToZip(url: string): Promise<ReadStream> {
const fileName = /([-a-z0-9]+)\.tar\.[lg]z$/i.exec(url)[1] + '.zip';
const filePath = path.join(process.env.TZE_ZIP_DIR || path.join(__dirname, 'tz-zip-cache'), fileName);
// Cache hit: serve the previously-built zip.
if (await fsp.stat(filePath).catch(() => false))
return fs.createReadStream(filePath);
// Pick the decompressor from the extension (.lz -> lzip, else gzip).
const [command, args] = url.endsWith('.lz') ? ['lzip', ['-d']] : ['gzip', ['-dc']];
const originalArchive = needle.get(url, { headers: { 'User-Agent': 'curl/7.64.1' } });
const tarExtract = tar.extract({ allowUnknownFormat: true });
const zipPack = archiver('zip');
const writeFile = fs.createWriteStream(filePath);
const commandProc = spawn(command, args);
// NOTE(review): throwing inside stream event handlers raises an uncaught
// exception rather than rejecting the promise created below — consider
// routing these through the reject path instead. TODO confirm intent.
commandProc.stderr.on('data', msg => { throw new Error(`${command} error: ${msg}`); });
commandProc.stderr.on('error', err => { throw err; });
// Pipeline: download -> decompress -> untar; each tar entry -> zip entry.
originalArchive.pipe(commandProc.stdin);
commandProc.stdout.pipe(tarExtract);
tarExtract.on('entry', (header, stream, next) => {
zipPack.append(stream, { name: header.name, date: header.mtime });
stream.on('end', next);
});
tarExtract.on('finish', () => zipPack.finalize());
zipPack.pipe(writeFile);
return new Promise<ReadStream>((resolve, reject) => {
// Normalize anything thrown by the streams into a proper Error.
const rejectWithError = (err: any): void =>
reject(err instanceof Error ? err : new Error(err.message || err.toString()));
writeFile.on('error', rejectWithError);
// Resolve only once the zip file is fully written to disk.
writeFile.on('finish', () => resolve(fs.createReadStream(filePath)));
tarExtract.on('error', err => {
// tar-stream has a problem with the format of a few of the tar files
// dealt with here, which nevertheless are valid archives.
if (/unexpected end of data|invalid tar header/i.test(err.message))
console.error('Archive %s: %s', url, err.message);
else
reject(err);
});
zipPack.on('error', rejectWithError);
zipPack.on('warning', rejectWithError);
commandProc.on('error', rejectWithError);
// A non-zero exit code from the decompressor fails the conversion.
commandProc.on('exit', err => err && reject(new Error(`${command} error: ${err}`)));
originalArchive.on('error', rejectWithError);
});
}
I would leave the streaming to node or packages, unless you have specific processing that needs to be done. Just wrap the whole stream setup in a promise.
If you also stream the request/response, it can be piped into the decompresser. Then stdout from the decompressor can be piped to the archive stream handlers.
import fs from 'fs'
import { spawn } from 'child_process'
import needle from 'needle'
import tar from 'tar-stream'
import archiver from 'archiver'
/**
 * Streams a .tar.lz archive from `url`, decompresses it through a spawned
 * `lzip -dc` process, repacks each tar entry into tardir.zip, and resolves
 * with true once the zip file has been fully written.
 */
export function tarLzToZip(url) {
  return new Promise((resolve, reject) => {
    // Setup streams
    const res = needle.get(url)
    const lzipProc = spawn('lzip', ['-dc'], { stdio: ['pipe', 'pipe', process.stderr] })
    const tarExtract = tar.extract()
    const zipPack = archiver('zip')
    const writeFile = fs.createWriteStream('tardir.zip')

    // Pipelines and processing
    // BUG FIX: the original piped into `gzipProc.stdin`, a name that does
    // not exist in this function — the decompressor here is `lzipProc`.
    res.pipe(lzipProc.stdin)
    lzipProc.stdout.pipe(tarExtract)

    // tar -> zip (simple file name)
    tarExtract.on('entry', function (header, stream, next) {
      console.log('entry', header)
      zipPack.append(stream, { name: header.name })
      stream.on('end', () => next())
    })
    tarExtract.on('finish', function () {
      zipPack.finalize()
    })
    zipPack.pipe(writeFile)

    // Handle the things
    writeFile.on('error', reject)
    writeFile.on('close', () => console.log('write close'))
    // BUG FIX: the original passed `resolve(true)` — calling resolve
    // immediately at registration time. The listener must be a function
    // that resolves when 'finish' actually fires.
    writeFile.on('finish', () => resolve(true))
    tarExtract.on('error', reject)
    zipPack.on('error', reject)
    zipPack.on('warning', reject)
    lzipProc.on('error', reject)
    lzipProc.on('exit', code => { if (code !== 0) reject(new Error(`lzip ${code}`)) })
    res.on('error', reject)
    res.on('done', () => console.log('request done', res.request.statusCode))
  })
}
You might want to be a bit more verbose about logging errors and stderr as the singular promise reject can easily hide what actually happened across the multiple streams.

Transaction numbers are only allowed on storage engines that support document-level locking - MongodbMemoryServer/Mochai/Chai/Supertest

FIXED: use storageEngine: "wiredTiger"
I use Mocha / Chai / Supertest and Mongodb-Memory-Server to test my app, but I received this error: Transaction numbers are only allowed on storage engines that support document-level locking
In real database and test by postman, it's working well.
My code:
In database.js
const mongoose = require('mongoose')
const { MongoMemoryReplSet } = require('mongodb-memory-server')

mongoose.set('useFindAndModify', false);

/**
 * Connects mongoose either to an in-memory replica set (NODE_ENV === 'test')
 * or to the real database at MONGO_URL.
 *
 * Transactions require a storage engine with document-level locking, so the
 * in-memory replica set must run wiredTiger — the default ephemeral engine
 * raises "Transaction numbers are only allowed on storage engines that
 * support document-level locking".
 */
const connect = async () => {
  try {
    let url = process.env.MONGO_URL
    let options = {
      //Something
    }
    if (process.env.NODE_ENV === 'test') {
      // BUG FIX: request wiredTiger explicitly so transactions work in tests.
      const replSet = new MongoMemoryReplSet({
        replSet: { storageEngine: 'wiredTiger' },
      });
      await replSet.waitUntilRunning();
      const uri = await replSet.getUri();
      await mongoose.connect(uri, options)
      //log connected
    } else {
      await mongoose.connect(url, options)
      //log connected
    }
  } catch (error) {
    //error
  }
}
I have two model: Company and User. I made a function to add a member to company with used transaction. My code
/**
 * POST handler: atomically adds the user (`req.body.userId`) as a member of
 * the company (`req.params.companyId`) inside a mongoose transaction, so the
 * Company and User documents stay consistent.
 */
const addMember = async (req, res, next) => {
  const { companyId } = req.params
  const { userId } = req.body
  const session = await mongoose.startSession()
  try {
    await session.withTransaction(async () => {
      const [company, user] = await Promise.all([
        Company.findOneAndUpdate(
          //Something
        ).session(session),
        User.findByIdAndUpdate(
          //Something
        ).session(session)
      ])
      //Something if... else
      return res.json({
        message: `Add member successfully!`,
      })
    })
  } catch (error) {
    //error
  } finally {
    // BUG FIX: the session was never released; without endSession() each
    // request leaks a server-side session.
    session.endSession()
  }
}
Here's router:
// Adds a member to a company; caller must present a valid auth token.
router.post('/:companyId/add-member',
authentication.required,
company.addMember
)
Test file:
const expect = require('chai').expect
const request = require('supertest')
const app = require('../app')
// Integration test: adding an existing user to a company should return 200.
// NOTE(review): companyIdEdited, signedUserTokenKey and memberId are
// presumably defined in shared fixtures/earlier hooks — not shown here.
describe('POST /company/:companyId/add-member', () => {
it('OK, add member', done => {
request(app).post(`/company/${companyIdEdited}/add-member`)
.set({ "x-access-token": signedUserTokenKey })
.send({userId: memberId})
.then(res => {
console.log(res.body)
expect(res.statusCode).to.equals(200)
done()
})
.catch((error) => done(error))
})
})
And i received error: Transaction numbers are only allowed on storage engines that support document-level locking'
How can I fix this?
Add retryWrites=false to your database uri. Example below:
mongodb://xx:xx@xyz.com:PORT,zz.com:33427/database-name?replicaSet=rs-xx&ssl=true&retryWrites=false

BINANCE API - How to get Account info with User Data Stream

I'm using Node and the ws npm package to work with WebSockets. Got the listenKey as stated in the docs (below), but I'm unable to get my account info using User Data Stream. I'd prefer to use a stream to read my most current account info (balances, etc) since using the Rest API to do it incurs a penalty (WEIGHT: 5) each time.
I've tried doing ws.send('outboundAccountInfo') but no joy.
DOCS: https://github.com/binance-exchange/binance-official-api-docs/blob/master/user-data-stream.md
Full code example - does not return any data:
import request from 'request'
import WebSocket from 'ws'
import { API_KEY } from '../../assets/secrets'
const DATA_STREAM_ENDPOINT = 'wss://stream.binance.com:9443/ws'
const BINANCE_API_ROOT = 'https://api.binance.com'
const LISTEN_KEY_ENDPOINT = `${BINANCE_API_ROOT}/api/v1/userDataStream`
/**
 * Opens Binance's user-data stream for this account and logs events as they
 * arrive. Note: the stream is silent until something changes in the account
 * (an order, a balance change, ...) — it does not push a snapshot on connect.
 */
const fetchAccountWebsocketData = async () => {
  const listenKey = await fetchListenKey()
  console.log('-> ', listenKey) // valid key is returned

  let ws
  try {
    ws = await openWebSocket(`${DATA_STREAM_ENDPOINT}/${listenKey}`)
  } catch (err) {
    throw (`ERROR - fetchAccountWebsocketData: ${err}`)
  }

  // BUG FIX: 'outboundAccountInfo' is not a WebSocket event — it is the
  // event *type* carried inside 'message' payloads (field `e`), so the
  // original second listener could never fire. Parse each message instead.
  ws.on('message', raw => {
    const data = JSON.parse(raw)
    if (data.e === 'outboundAccountInfo') {
      console.log('account update:', data)
    } else {
      console.log(data)
    }
  })
}
/**
 * Opens a WebSocket to `endpoint`, resolving with the socket once the
 * connection is established and rejecting on connection errors.
 */
const openWebSocket = endpoint => {
  const p = new Promise((resolve, reject) => {
    const ws = new WebSocket(endpoint)
    console.log('\n-->> New Account Websocket')
    ws.on('open', () => {
      console.log('\n-->> Websocket Account open...')
      resolve(ws)
    })
    // BUG FIX: the original passed the error callback as a second argument
    // to ws.on('open', ...), which is ignored — connection failures never
    // rejected. Errors must be subscribed via their own 'error' event.
    ws.on('error', err => {
      console.log('fetchAccountWebsocketData error:', err)
      reject(err)
    })
  })
  p.catch(err => console.log(`ERROR - fetchAccountWebsocketData: ${err}`))
  return p
}
// Requests a user-data-stream listen key from the Binance REST API.
// Returns a promise resolving with the key; failures are logged and
// still propagated to the caller.
const fetchListenKey = () => {
  const p = new Promise((resolve, reject) => {
    const requestOptions = {
      url: LISTEN_KEY_ENDPOINT,
      headers: {'X-MBX-APIKEY': API_KEY}
    }
    request.post(requestOptions, (error, response, responseBody) => {
      if (error) {
        reject(error)
        return
      }
      const { listenKey } = JSON.parse(responseBody)
      resolve(listenKey)
    })
  })
  p.catch(err => console.log(`ERROR - fetchListenKey: ${err}`))
  return p
}
export default fetchAccountWebsocketData
I was struggling too... for hours!!!
https://www.reddit.com/r/BinanceExchange/comments/a902cq/user_data_streams_has_anyone_used_it_successfully/
The binance user data stream doesn't return anything when you connect
to it, only when something changes in your account. Try running your
code, then go to binance and place an order in the book, you should
see some data show up*

Categories

Resources