JestJS - Multiple WebSocket connections hang Jest

I'm using this function to test my server: it creates a given number of WebSocket connections and checks whether my game starts. However, no matter what timeout I assign, it hangs in Jest. In the browser (Firefox, Edge Chromium) it works perfectly fine.
function checkGameStart(numberOfBots) {
    return new Promise((resolve, reject) => {
        let clients = [];
        let connection = [];
        for (let i = 0; i < numberOfBots; i++) {
            clients.push(new WebSocket('ws://127.0.0.1:8080'));
            connection.push(false);
            clients[i].onmessage = (msg) => {
                let data = JSON.parse(msg.data);
                if (data.title === "gameStarted") {
                    connection[i] = true;
                    checkAllClientsReady();
                }
            }
            clients[i].onerror = (err) => reject(err);
        }
        function checkAllClientsReady() {
            if (!(connection.includes(false))) {
                resolve(true);
                closeAllConnections();
            }
        }
        function closeAllConnections() {
            for (let i = 0; i < clients.length; i++) {
                clients[i].close()
            }
        }
    });
}
Does anyone know why this happens and what I can do to make sure it doesn't happen again?
Test code:
test('Check the game starts', () => {
    return expect(checkGameStart(4)).resolves.toBe(true);
});

I refactored your code a little bit and added a WebSocket server, using the ws NPM package, to the test setup:
const { WebSocketServer } = require('ws')

const port = 8080
const wss = new WebSocketServer({ port })

beforeAll(() => {
    wss.on('connection', (ws) => {
        ws.send(JSON.stringify({ title: 'gameStarted' }))
    })
})

afterAll(() => {
    wss.close()
})

async function checkGameStart(numberOfBots) {
    await Promise.all(
        new Array(numberOfBots).fill(null)
            .map(() => new Promise((resolve, reject) => {
                const ws = new WebSocket(`ws://localhost:${port}`)
                ws.onmessage = ({ data }) => {
                    const { title } = JSON.parse(data)
                    if (title === 'gameStarted') {
                        ws.close()
                        resolve()
                    }
                }
                ws.onerror = (err) => {
                    ws.close()
                    reject(err)
                }
            }))
    )
    return true
}

test('Check the game starts', async () => {
    await expect(checkGameStart(4)).resolves.toBe(true);
});
$ npx jest
PASS ./websocket.test.js
✓ Check the game starts (64 ms)
Test Suites: 1 passed, 1 total
Tests: 1 passed, 1 total
Snapshots: 0 total
Time: 0.708 s, estimated 1 s
Ran all test suites.
This only works if Jest is also configured to use a jsdom test environment:
// jest.config.js
module.exports = {
    testEnvironment: "jsdom",
};
Otherwise, the WebSocket constructor will be undefined, since it is only available in web browser environments and, by default, Jest runs in the node environment.
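If you prefer not to switch the whole project to jsdom, Jest also lets you opt in per test file with a docblock pragma at the very top of the file (note that from Jest 28 onward the jsdom environment ships separately as the jest-environment-jsdom package):
/**
 * @jest-environment jsdom
 */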

Lesson 9 hardhat raffle staging test error: No Contract deployed with name VRFCoordinatorV2Mock

When I run my tests on the GOERLI network, it shows an error that reads:
1) Raffle Unit Tests
   "before each" hook for "works with live Chainlink Keepers and Chainlink VRF, we get a random winner":
     Error: No Contract deployed with name VRFCoordinatorV2Mock
      at Object.getContract (node_modules/@nomiclabs/hardhat-ethers/src/internal/helpers.ts:447:11)
      at processTicksAndRejections (node:internal/process/task_queues:96:5)
      at Context. (test/staging/Raffle.staging.test.js:15:38)
const { inputToConfig } = require("@ethereum-waffle/compiler")
const { assert, expect } = require("chai")
const { network, getNamedAccounts, deployments, ethers } = require("hardhat")
const { developmentChains, networkConfig } = require("../../helper-hardhat-config")

developmentChains.includes(network.name)
    ? describe.skip
    : describe("Raffle Unit Tests", function () {
          let raffle, raffleEntranceFee, deployer

          beforeEach(async function () {
              deployer = (await getNamedAccounts()).deployer
              await deployments.fixture(["all"])
              raffle = await ethers.getContract("Raffle", deployer)
              vrfCoordinatorV2Mock = await ethers.getContract("VRFCoordinatorV2Mock", deployer)
              raffleEntranceFee = await raffle.getEntranceFee()
          })

          describe("fulfillRandomWords", function () {
              it("works with live Chainlink Keepers and Chainlink VRF, we get a random winner", async function () {
                  // enter the raffle
                  console.log("Setting up test...")
                  await deployments.fixture(["mocks"])
                  const startingTimeStamp = await raffle.getLastTimeStamp()
                  const accounts = await ethers.getSigners()

                  console.log("Setting up Listener...")
                  await new Promise(async (resolve, reject) => {
                      // setup listener before we enter the raffle
                      // Just in case the blockchain moves REALLY fast
                      raffle.once("WinnerPicked", async () => {
                          console.log("WinnerPicked event fired!")
                          try {
                              const recentWinner = await raffle.getRecentWinner()
                              const raffleState = await raffle.getRaffleState()
                              const winnerEndingBalance = await accounts[0].getBalance()
                              const endingTimeStamp = await raffle.getLastTimeStamp()

                              await expect(raffle.getPlayer(0)).to.be.reverted
                              assert.equal(recentWinner.toString(), accounts[0].address)
                              assert.equal(raffleState, 0)
                              assert.equal(
                                  winnerEndingBalance.toString(),
                                  winnerStartingBalance.add(raffleEntranceFee).toString()
                              )
                              assert(endingTimeStamp > startingTimeStamp)
                              resolve()
                          } catch (error) {
                              console.log(error)
                              reject(error)
                          }
                      })
                      // Then entering the raffle
                      console.log("Entering Raffle...")
                      const tx = await raffle.enterRaffle({ value: raffleEntranceFee })
                      await tx.wait(1)
                      console.log("Ok, time to wait...")
                      const winnerStartingBalance = await accounts[0].getBalance()
                      // and this code WONT complete until our listener has finished listening!
                  })
              })
          })
      })
Can anyone help me out with this? Here's the link to my repo: https://github.com/anooj1/hardhat-raffle
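One observation, offered only as a guess from the error above rather than a verified fix: VRFCoordinatorV2Mock is normally deployed only by the mock deploy scripts on local development chains, so when the staging test runs against Goerli there is no such deployment for ethers.getContract to find. A staging-style beforeEach usually fetches just the live contracts, along these lines:
beforeEach(async function () {
    // staging sketch: no fixtures and no mock lookup on a live testnet
    deployer = (await getNamedAccounts()).deployer
    raffle = await ethers.getContract("Raffle", deployer)
    raffleEntranceFee = await raffle.getEntranceFee()
})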

What am I missing here to get data out of this spawned Node.js child process?

I'm trying to use a spawned command-line lzip process to expand an lzipped data stream, as I haven't found any good native JavaScript tools to do the job.
I can get this to work using files and file descriptors, but it seems stupid to have to write out, and read back in, a bunch of temporary scratch files. I want to do all of the work I can in memory.
So here's the code I'm trying to use:
import { requestBinary } from 'by-request';
import { spawn } from 'child_process';
import { min } from '@tubular/math';

export async function tarLzToZip(url: string): Promise<void> {
    const lzData = await requestBinary(url, { headers: { 'User-Agent': 'curl/7.64.1' } });
    const lzipProc = spawn('lzip', ['-d'], { stdio: ['pipe', 'pipe', process.stderr] });
    let tarContent = Buffer.alloc(0);

    lzipProc.stdout.on('data', data => {
        tarContent = Buffer.concat([tarContent, data], tarContent.length + data.length);
    });

    for (let offset = 0; offset < lzData.length; offset += 4096) {
        await new Promise<void>((resolve, reject) => {
            lzipProc.stdin.write(lzData.slice(offset, min(offset + 4096, lzData.length)), err => {
                if (err)
                    reject(err);
                else
                    resolve();
            });
        });
    }

    await new Promise<void>((resolve, reject) => {
        lzipProc.stdin.end((err: any) => {
            if (err)
                reject(err);
            else
                resolve();
        });
    });

    console.log('data length:', tarContent.length);
}
When I step through with a debugger, everything seems to be going well with sending data into lzipProc.stdin. (I've tried both writing it in chunks like this and writing all of the data in one go.) The lzipProc.stdout.on('data', ...) handler, however, never gets called. When I get to the end, tarContent is empty.
What's missing here? Do I need a different stdio config? Are there different stream objects I should be using? Do I need more goats to sacrifice under the light of a full moon?
UPDATE
My solution based on Matt's excellent answer posted below, with all of the particulars for my use case:
import archiver from 'archiver';
import fs, { ReadStream } from 'fs';
import fsp from 'fs/promises';
import needle from 'needle';
import path from 'path';
import { spawn } from 'child_process';
import tar from 'tar-stream';

const baseUrl = 'https://data.iana.org/time-zones/releases/';

export async function codeAndDataToZip(version: string): Promise<ReadStream> {
    return compressedTarToZip(`${baseUrl}tzdb-${version}.tar.lz`);
}

export async function codeToZip(version: string): Promise<ReadStream> {
    return compressedTarToZip(`${baseUrl}tzcode${version}.tar.gz`);
}

export async function dataToZip(version: string): Promise<ReadStream> {
    return compressedTarToZip(`${baseUrl}tzdata${version}.tar.gz`);
}

async function compressedTarToZip(url: string): Promise<ReadStream> {
    const fileName = /([-a-z0-9]+)\.tar\.[lg]z$/i.exec(url)[1] + '.zip';
    const filePath = path.join(process.env.TZE_ZIP_DIR || path.join(__dirname, 'tz-zip-cache'), fileName);

    if (await fsp.stat(filePath).catch(() => false))
        return fs.createReadStream(filePath);

    const [command, args] = url.endsWith('.lz') ? ['lzip', ['-d']] : ['gzip', ['-dc']];
    const originalArchive = needle.get(url, { headers: { 'User-Agent': 'curl/7.64.1' } });
    const tarExtract = tar.extract({ allowUnknownFormat: true });
    const zipPack = archiver('zip');
    const writeFile = fs.createWriteStream(filePath);
    const commandProc = spawn(command, args);

    commandProc.stderr.on('data', msg => { throw new Error(`${command} error: ${msg}`); });
    commandProc.stderr.on('error', err => { throw err; });

    originalArchive.pipe(commandProc.stdin);
    commandProc.stdout.pipe(tarExtract);

    tarExtract.on('entry', (header, stream, next) => {
        zipPack.append(stream, { name: header.name, date: header.mtime });
        stream.on('end', next);
    });

    tarExtract.on('finish', () => zipPack.finalize());
    zipPack.pipe(writeFile);

    return new Promise<ReadStream>((resolve, reject) => {
        const rejectWithError = (err: any): void =>
            reject(err instanceof Error ? err : new Error(err.message || err.toString()));

        writeFile.on('error', rejectWithError);
        writeFile.on('finish', () => resolve(fs.createReadStream(filePath)));
        tarExtract.on('error', err => {
            // tar-stream has a problem with the format of a few of the tar files
            // dealt with here, which nevertheless are valid archives.
            if (/unexpected end of data|invalid tar header/i.test(err.message))
                console.error('Archive %s: %s', url, err.message);
            else
                reject(err);
        });
        zipPack.on('error', rejectWithError);
        zipPack.on('warning', rejectWithError);
        commandProc.on('error', rejectWithError);
        commandProc.on('exit', err => err && reject(new Error(`${command} error: ${err}`)));
        originalArchive.on('error', rejectWithError);
    });
}
I would leave the streaming to Node or packages, unless you have specific processing that needs to be done. Just wrap the whole stream setup in a promise.
If you also stream the request/response, it can be piped into the decompressor. Then stdout from the decompressor can be piped to the archive stream handlers.
import fs from 'fs'
import { spawn } from 'child_process'
import needle from 'needle'
import tar from 'tar-stream'
import archiver from 'archiver'

export function tarLzToZip(url) {
    return new Promise((resolve, reject) => {
        // Setup streams
        const res = needle.get(url)
        const lzipProc = spawn('lzip', ['-dc'], { stdio: ['pipe', 'pipe', process.stderr] })
        const tarExtract = tar.extract()
        const zipPack = archiver('zip')
        const writeFile = fs.createWriteStream('tardir.zip')

        // Pipelines and processing
        res.pipe(lzipProc.stdin)
        lzipProc.stdout.pipe(tarExtract)

        // tar -> zip (simple file name)
        tarExtract.on('entry', function (header, stream, next) {
            console.log('entry', header)
            zipPack.append(stream, { name: header.name })
            stream.on('end', () => next())
        })
        tarExtract.on('finish', function () {
            zipPack.finalize()
        })
        zipPack.pipe(writeFile)

        // Handle the things
        writeFile.on('error', reject)
        writeFile.on('close', () => console.log('write close'))
        writeFile.on('finish', () => resolve(true))
        tarExtract.on('error', reject)
        zipPack.on('error', reject)
        zipPack.on('warning', reject)
        lzipProc.on('error', reject)
        lzipProc.on('exit', code => { if (code !== 0) reject(new Error(`lzip ${code}`)) })
        res.on('error', reject)
        res.on('done', () => console.log('request done', res.request.statusCode))
    })
}
You might want to be a bit more verbose about logging errors and stderr, as the singular promise reject can easily hide what actually happened across the multiple streams.
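As one possible way to do that (a sketch only; spawnWithStderr and its exited property are made-up names for illustration, not part of the answer above), stderr can be piped instead of inherited and folded into the rejection message:
import { spawn } from 'child_process'

// Collects stderr so a failing lzip/gzip run rejects with the actual message,
// not just a bare exit code.
function spawnWithStderr(command, args) {
    const proc = spawn(command, args, { stdio: ['pipe', 'pipe', 'pipe'] })
    let stderrText = ''

    proc.stderr.on('data', chunk => { stderrText += chunk })
    proc.exited = new Promise((resolve, reject) => {
        proc.on('error', reject)
        proc.on('exit', code => code === 0
            ? resolve()
            : reject(new Error(`${command} exited with code ${code}: ${stderrText.trim()}`)))
    })

    return proc
}
The rest of the pipeline stays the same; the surrounding promise can then wait on proc.exited in addition to the write stream's finish event, so each failure is reported with its own source.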

VSCode/Mocha exits prematurely when running extension tests

I am trying to UI-test a command in my extension. When I run the test, it quits without any error message before reaching the assert.equal() call.
The code of my test is as follows:
suite("Extension Tests", () => {
const fixtureFolderLocation = "../fixture/";
test("Wrap in parentheses", async () => {
const uri = vscode.Uri.file(
path.join(__dirname, fixtureFolderLocation, "fixture2.html"),
);
const document = await vscode.workspace.openTextDocument(uri);
const editor = await vscode.window.showTextDocument(document);
editor.selection = new vscode.Selection(new vscode.Position(0, 0), new vscode.Position(0, 4));
await vscode.commands.executeCommand("nkatex.wrapInParentheses");
const text = document.getText(new vscode.Range(new vscode.Position(0, 0), new vscode.Position(0, 21)));
assert.equal(text, "\\left ( word\\right ) ");
});
});
Output of the test:
$ node ./out/test/runTest.js
Found .vscode-test/vscode-1.42.1. Skipping download.
[main 2020-02-16T15:06:03.708Z] update#setState idle
Extension Tests
Exit code: 0
Done
Done in 17.30s.
runTest.ts:
async function main() {
    try {
        // Download VS Code, unzip it and run the integration test
        await runTests({
            extensionDevelopmentPath: path.resolve(__dirname, "../../"),
            extensionTestsPath: path.resolve(__dirname, "./suite"),
            launchArgs: [
                "--disable-extensions",
            ],
        });
    } catch (err) {
        console.error("Failed to run tests");
        process.exit(1);
    }
}

main();
index.ts:
export const run = async (): Promise<void> => {
    // Create the mocha test
    const mocha = new Mocha({
        ui: "tdd",
    });
    mocha.useColors(true);

    const testsRoot = path.resolve(__dirname, "..");
    const files = await async.glob("**/**.test.js", { cwd: testsRoot });

    for (const file of files) {
        mocha.addFile(path.resolve(testsRoot, file));
    }

    mocha.run((failureNumber) => {
        if (failureNumber > 0) {
            throw new Error(`${failureNumber} tests failed.`);
        }
    });
};
I have also tried using the done() callback and waiting a specific amount of time, but it did not help:
const sleep = (ms = 1000) => new Promise((resolve) => setTimeout(resolve, ms));

suite("Extension Tests", () => {
    const fixtureFolderLocation = "../fixture/";

    test("Wrap in parentheses", async (done) => {
        const uri = vscode.Uri.file(
            path.join(__dirname, fixtureFolderLocation, "fixture2.html"),
        );
        const document = await vscode.workspace.openTextDocument(uri);
        const editor = await vscode.window.showTextDocument(document);
        await sleep(2000);
        editor.selection = new vscode.Selection(new vscode.Position(0, 0), new vscode.Position(0, 4));
        await vscode.commands.executeCommand("nkatex.wrapInParentheses");
        await sleep();
        const text = document.getText(new vscode.Range(new vscode.Position(0, 0), new vscode.Position(0, 21)));
        assert.equal(text, "\\left ( word\\right ) ");
        done();
    });
});
The test exits at random places. Sometimes the test passes correctly, but more often it quits before the assertion. I believe this has something to do with the code being asynchronous, but I have seen similar code in other tests, like here.
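One thing that stands out in index.ts above (an assumption about the cause, not a confirmed diagnosis): run() resolves as soon as mocha.run() has been started, so the test host can shut VS Code down while the suite is still executing, which would explain the random exit points. A sketch of wrapping mocha.run in a promise that run() waits on, so the host stays alive until every test has finished:
export const run = async (): Promise<void> => {
    const mocha = new Mocha({ ui: "tdd" });
    mocha.useColors(true);

    const testsRoot = path.resolve(__dirname, "..");
    const files = await async.glob("**/**.test.js", { cwd: testsRoot });
    for (const file of files) {
        mocha.addFile(path.resolve(testsRoot, file));
    }

    // Only settle once Mocha reports back, so the caller actually waits for
    // the whole suite instead of returning immediately.
    await new Promise<void>((resolve, reject) => {
        mocha.run((failureNumber) => {
            if (failureNumber > 0) {
                reject(new Error(`${failureNumber} tests failed.`));
            } else {
                resolve();
            }
        });
    });
};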

How can I connect multichain node with app?

I need to create an app based on MultiChain for a project with my university. I have to use the blockchain as a database and create an app that puts information into the blocks.
I'm trying to use Meteor and the JSON-RPC API (https://github.com/scoin/multichain-node), but I can't connect to the node. Could someone help me, or suggest an alternative to Meteor?
I installed multichain-node with
npm install multichain-node --save
which created a multichain-node folder in node_modules.
In my main.js I'm trying to connect to the node (which is running in the terminal):
import './main.html';

console.log("b4 connection");

const connection = {
    port: 6744,
    host: '127.0.0.1',
    user: "multichainrpc",
    pass: "5zGVBTY7nVsnEmp3vbGq8LTbmnmjueYkiTLc5pRzE7xh"
}

const multichain = require("../node_modules/multichain-node/index.js")(connection);
console.log("info");

let listenForConfirmations = (txid) => {
    console.log("WAITING FOR CONFIRMATIONS")
    return new Promise((resolve, reject) => {
        var interval = setInterval(() => {
            getConfirmations(txid)
                .then(confirmations => {
                    if (confirmations > 0) {
                        clearInterval(interval);
                        return resolve()
                    }
                })
                .catch(err => {
                    return reject(err);
                })
        }, 5000)
    })
}

let getConfirmations = async (txid) => {
    let res = await multichain.getWalletTransaction({
        txid: txid
    })
    return res.confirmations;
}

let startTests = () => {
    const state = {};
    console.log("Running Tests")
    console.log("TEST: GET INFO")
    multichain.getInfo((err, res) => {
        console.log(res);
    })
}

startTests()
This is the error in the Chrome console:
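The error itself isn't reproduced above, but one likely cause with this setup (a guess, not a confirmed diagnosis) is that the code lives in the Meteor client bundle: multichain-node is a Node.js JSON-RPC client, so the connection has to be made from server code rather than from the browser. A minimal server-side sketch, reusing the same connection details:
// server/main.js — runs on the Meteor server, where Node's networking is available
const multichain = require('multichain-node')({
    port: 6744,
    host: '127.0.0.1',
    user: 'multichainrpc',
    pass: '5zGVBTY7nVsnEmp3vbGq8LTbmnmjueYkiTLc5pRzE7xh'
});

multichain.getInfo((err, info) => {
    if (err) throw err;
    console.log('chain info:', info);
});
The client UI could then talk to the chain indirectly, for example through a Meteor method that wraps these calls.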

Jest - stub function within function

I'm writing unit tests where I need to set a mock response for a function within a function.
This is the function I want to mock:
cassandraDriver.js
module.exports = ({
    cassandra_user,
    cassandra_password,
    cassandra_address
}) => {
    if (!cassandra_address.length) throw Error('Cassandra address is not valid')
    return new Promise((resolve, reject) => {
        try {
            const client = new driver.Client({
                contactPoints: cassandra_address.split(','),
                authProvider: authProvider(cassandra_user, cassandra_password),
                queryconfig: {
                    consistency: driver.types.consistencies.quorum
                }
            })
            return resolve(client)
        } catch (e) {
            reject(e)
        }
    })
}
This is the file that uses it:
const {
    cassandraDriver
} = require('./lib')

module.exports = async ({
    username = 'cassandra', //default values
    password = 'cassandra', //default values
    address,
    keyspace,
    replication_factor = 1,
    migration_script_path,
    logger = require('bunyan').createLogger({ name: 'BuildCassandra' })
} = {}) => {
    try {
        const client = await cassandraDriver(username, password, address)
        }).catch(err => {
            throw Error(err)
        })
    } catch (e) {
        logger.error(e)
        throw e
    }
}
How can I mock the call to 'cassandraDriver' in unit tests? I tried using rewire, but the method is not exposed as it normally would be.
Thanks in advance.
Let's modify your function so that it can accept a mock driver instead of cassandraDriver:
const {
    cassandraDriver
} = require('./lib')

module.exports = async ({
    username = 'cassandra',
    password = 'cassandra',
    address,
    keyspace,
    replication_factor = 1,
    migration_script_path,
    logger = require('bunyan').createLogger({
        name: 'BuildCassandra'
    }),
    driver = cassandraDriver
} = {}) => {
    try {
        const client = await driver(
            username,
            password,
            address
        )
    } catch (e) {
        logger.error(e)
        throw e
    }
}
(I also removed a superfluous .catch block.)
Next, you should create a "cassandra-driver-mock.js" which emulates the behaviour of the Cassandra driver for your unit tests.
The unit tests, of course, would pass the mock instead of the real driver as an option parameter.
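For example (a minimal sketch; the mock's file name, the shape of the fake client, and the build-cassandra path are assumptions for illustration):
// cassandra-driver-mock.js — mirrors cassandraDriver's promise-based signature
module.exports = (user, password, address) =>
    Promise.resolve({
        execute: jest.fn().mockResolvedValue({ rows: [] }),
        shutdown: jest.fn()
    })

// in the unit test, inject it via the new `driver` option
const buildCassandra = require('./build-cassandra')
const mockDriver = require('./cassandra-driver-mock')

test('connects using the injected driver', async () => {
    const driver = jest.fn(mockDriver)
    await buildCassandra({ address: '127.0.0.1', driver })
    expect(driver).toHaveBeenCalledWith('cassandra', 'cassandra', '127.0.0.1')
})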
You can stub the module which exports cassandraDriver in your test file:
import cassandraDriver from "<path-to-cassandraDriver.js>";

jest.mock("<path-to-cassandraDriver.js>", () => jest.fn());

cassandraDriver.mockImplementation(() => {
    // Stub implementation and return value
});
See Manual Mocks for more information.
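A fuller usage sketch of that approach (the paths and the expectations are assumptions, since the real project layout isn't shown):
// build-cassandra.test.js
jest.mock('./lib', () => ({ cassandraDriver: jest.fn() }))

const { cassandraDriver } = require('./lib')
const buildCassandra = require('./build-cassandra')

test('delegates connection to cassandraDriver', async () => {
    cassandraDriver.mockResolvedValue({ execute: jest.fn() })
    await buildCassandra({ address: '127.0.0.1' })
    expect(cassandraDriver).toHaveBeenCalled()
})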
