I'm trying to use a node process to kick off an interactive docker session then automate some commands to it:
// Question code: spawn an interactive docker session through pty.js
// and script commands into it.
var spawn = require('pty.js').spawn;
// NOTE(review): pty.js is unmaintained; node-pty is its successor.
var proc = spawn('docker', [ 'run', '-i', '-t', 'mycontainer' ], {
name: 'test',
rows: 30,
cols: 200,
cwd: process.env.HOME,
env: process.env
});
// Echo everything the pseudo-terminal emits.
proc.on('data', function (data) {
console.log(data);
});
// NOTE(review): 'cd /tmp' and 'npm install' lack a trailing \r, so they
// are typed into the terminal but never "entered" — only the nvm line runs.
proc.write('cd /tmp');
proc.write('nvm install 0.10\r');
proc.write('npm install');
This seems to work, the only issue is it seems like it's just writing in all the commands and firing them. I don't seem to have any control over catching the output or errors of individual commands.
I'm curious if there's a better way to approach this?
You can pipe streams to this process; however, it is not advised to do so.
// Wire the user's terminal to the pty with stream.pipeline.
const { pipeline } = require('stream');
const { spawn } = require('node-pty')
const proc = spawn('docker', ['run', '--rm', '-ti', 'alpine', '/bin/sh'], {
name: 'xterm-color',
cwd: process.env.HOME,
env: process.env,
// encoding: null makes the pty emit raw Buffers instead of utf8 strings.
encoding: null,
});
// stdin -> pty and pty -> stdout; log (but do not crash on) stream errors.
pipeline(process.stdin, proc, (err) => err && console.warn(err.message))
pipeline(proc, process.stdout, (err) => err && console.warn(err.message))
The maintainers have suggested not to use the pty like a stream. It's simply a matter of changing the pipeline for something like this.
// Forward each chunk typed on stdin to the pty as a utf8 string.
const forwardInput = async (input) => {
  for await (const data of input) {
    proc.write(data.toString());
  }
};
forwardInput(process.stdin).catch(console.warn);
The gist is that we should pass string into the write function. We also should expect string as its output. Therefore, we should not set any encoding in the object so that it by default outputs utf8 string.
Regarding your initial question. proc.write('ls\r') is the correct way of doing it. Note the trailing \r to virtually press enter. Just like in a normal terminal, when you execute a command, you cannot fire a second one simultaneously. The commands will just queue up and run one after another.
Input:
const { spawn } = require('node-pty')
// Interactive node container; -ti allocates a tty and keeps stdin open.
const proc = spawn('docker', ['run', '--rm', '-ti', '--network=host', 'node', '/bin/sh'], {
name: 'xterm-color',
cwd: process.env.HOME,
env: process.env,
});
// Commands queue up in the shell and run one after another; each \r
// virtually "presses enter".
proc.write('npm init -y\r')
proc.write('npm i eslint\r')
proc.write('ls node_modules /\r')
// Mirror pty output to our stdout; dispose both listeners once the pty exits.
const disposable = proc.onData((text) => process.stdout.write(text))
const exitDisposable = proc.onExit(() => {
disposable.dispose()
exitDisposable.dispose()
})
Output:
npm i eslint
ls node_modules /
# Wrote to /package.json:
{
"name": "",
"version": "1.0.0",
"description": "",
"main": "index.js",
"directories": {
"lib": "lib"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC"
}
npm notice created a lockfile as package-lock.json. You should commit this file.
npm WARN #1.0.0 No description
npm WARN #1.0.0 No repository field.
+ eslint#7.1.0
added 136 packages from 82 contributors and audited 136 packages in 9.461s
9 packages are looking for funding
run `npm fund` for details
found 0 vulnerabilities
# /:
bin etc lib64 node_modules package.json run sys var
boot home media opt proc sbin tmp
dev lib mnt package-lock.json root srv usr
node_modules:
#babel is-extglob
#types is-fullwidth-code-point
...
...
#
You see it wrote ls before npm install was completed but it ran afterwards.
Also note that I used -ti instead of just -t for the docker args.
Looking through the source for the pty.js module, it is clear that your proc.write is really the standard Node net.Socket.write -- https://nodejs.org/api/net.html#net_socket_write_data_encoding_callback
In short, yes, you are just spamming the commands to the socket. You need to wait for each command to finish before executing the next. Thus, you'll need to use the callback parameter of .write to determine when a command has finished and then proceed from there. Something like this may work:
// Quick-and-dirty sequencing: each write's callback submits the next
// command once the previous write has been flushed to the socket.
let cmdcount = 0;
function submitcmd() {
  switch (cmdcount) {
    case 0:
      proc.write('nvm install 0.10\r', 'utf8', submitcmd);
      break;
    case 1:
      // Trailing \r is required to actually "press enter" in the pty;
      // without it the command is typed but never executed.
      proc.write('npm install\r', 'utf8', submitcmd);
      break;
  }
  cmdcount += 1;
}
proc.write('cd /tmp\r', 'utf8', submitcmd);
Related
I am mocking methods of the ControlProvider class using Sinon stubs. It is working fine when I debug these tests in VSCode, but when I run the tests from the command line, the Sinon stub mocking doesn't happen. Anything I am missing?
Test file
// Integration test: stub the ControlProvider UI prompts, run the
// configure command, then assert each prompt was invoked as expected.
context('Should configure pipeline', function () {
it('configure pipeline', async function () {
// Disable mocha's timeout; extension activation can be slow.
this.timeout(0);
await sleep(5000);
await vscode.extensions.getExtension(extensionId).activate();
let mockGetInput, mockShowQuickPick, mockShowInformationMessage;
// Stub on the prototype so instances created inside the command pick
// up the stubs.
mockGetInput = sinon.stub(ControlProvider.prototype, 'showInputBox');
mockGetInput.onFirstCall().resolves('text');
mockShowQuickPick = sinon.stub(ControlProvider.prototype, 'showQuickPick');
mockShowQuickPick
.onFirstCall().resolves(data1)
.onSecondCall().resolves(data2)
mockShowInformationMessage = sinon.stub(ControlProvider.prototype, 'showInformationBox');
mockShowInformationMessage.onFirstCall().resolves("Done");
// This sleep is added because otherwise the tests run via cmd exits before running executeCommand. Unable to find root cause of this issue.
await sleep(2000);
await vscode.commands.executeCommand("configure-cicd-pipeline");
sinon.assert.calledOnce(mockGetInput);
sinon.assert.calledTwice(mockShowQuickPick);
sinon.assert.calledOnce(mockShowInformationMessage);
});
// Undo all stubs so later suites see the real implementations.
after(() => {
sinon.restore();
});
});
Launch debug configuration
{
"name": "Extension Tests",
"type": "extensionHost",
"request": "launch",
"runtimeExecutable": "${execPath}",
"args": [
"${workspaceRoot}/out/configure/test/test-fixtures/",
"--extensionDevelopmentPath=${workspaceFolder}",
"--extensionTestsPath=${workspaceFolder}/out/configure/test"
],
"outFiles": [
"${workspaceFolder}/out/configure/test/**/*.js"
]
},
package.json
"scripts": {
"vscode:prepublish": "npm run compile",
"compile": "tsc -p ./ && node copyStaticFiles.js",
"watch": "node copyStaticFiles.js && tsc -watch -p ./",
"pretest": "npm run compile",
"test": "cp -r ./src/configure/test/test-fixtures/ ./out/configure/test/test-fixtures/ && node ./out/configure/test/runTest.js"
},
I'm trying to use Jest to write unit tests for some JavaScript code I have. The problem is that the code file contains functions that aren't defined or imported, so when I try to import the file to test, Jest throws an error about the undefined functions. Is there a way I can get around this? Only import the functions I want to test, for example?
Here's a snippet of the file with the code that I'm trying to test:
// run any data migrations
// Roll20 sheet-worker event: fires when the character sheet is opened.
// NOTE(review): on/getAttrs/sheetMigration/recalculateSkills are injected
// by the Roll20 sandbox, not imported — which is why requiring this file
// under Jest throws ReferenceError.
on("sheet:opened", () => {
sheetMigration();
getAttrs(["btatow_sheet_version"], ({
btatow_sheet_version
}) => {
if (btatow_sheet_version >= 3) {
recalculateSkills();
}
});
});
...
// calculate stat values when XP amount changes
// Re-runs calculateAbilityScore whenever any listed XP attribute changes.
on("change:strength_xp change:body_xp change:reflex_xp change:dexterity_xp change:intelligence_xp change:will_xp change:charisma_xp change:edge_xp", calculateAbilityScore)
// Map an attribute score to its linked-attribute modifier.
// Scores above 10 scale as floor(score / 3); lower scores fall into
// fixed bands from -4 up to +2.
const calculateLinkedAttributeValue = attribute => {
  if (attribute > 10) return Math.floor(attribute / 3);
  if (attribute < 1) return -4;
  if (attribute < 2) return -2;
  if (attribute < 4) return -1;
  if (attribute < 7) return 0;
  if (attribute < 10) return 1;
  return 2;
}
...
// exports for testing
// Exposes the pure helper so the Jest test file can require it.
module.exports = calculateLinkedAttributeValue
Here's the code in the test file:
// Requiring the worker runs its top-level code (including the Roll20
// `on(...)` registrations), which is what triggers the ReferenceError.
const calculateLinkedAttributeValue = require('./sheet-worker')
test('should calculate linked attribute value for attribute value of 0', () => {
expect(calculateLinkedAttributeValue(0)).toBe(-4)
})
I have a package.json file set up, and have brought in Jest as a dependency, like so:
{
"name": "battletech-a-time-of-war",
"version": "1.0.0",
"description": "Character sheet for Roll20 for the A Time of War TTRPG system.",
"main": "index.js",
"scripts": {
"test": "jest"
},
"author": "",
"license": "ISC",
"dependencies": {
"jest": "^26.0.1"
},
"type": "module",
"sourceType": "module"
}
Attempting to run the tests via the command line produces the following output:
C:\Stuff\Development\roll20-character-sheets\BattleTech-A-Time-of-War\development>npm run test
> battletech-a-time-of-war#1.0.0 test C:\Stuff\Development\roll20-character-sheets\BattleTech-A-Time-of-War
> jest
FAIL development/sheet-worker.test.js
● Test suite failed to run
ReferenceError: on is not defined
1 | // run any data migrations
> 2 | on("sheet:opened", () => {
| ^
3 | sheetMigration();
4 |
5 | getAttrs(["btatow_sheet_version"], ({
at Object.<anonymous> (development/sheet-worker.js:2:1)
at Object.<anonymous> (development/sheet-worker.test.js:1:39)
Test Suites: 1 failed, 1 total
Tests: 0 total
Snapshots: 0 total
Time: 1.226 s
Ran all test suites.
npm ERR! code ELIFECYCLE
npm ERR! errno 1
npm ERR! battletech-a-time-of-war#1.0.0 test: `jest`
npm ERR! Exit status 1
npm ERR!
npm ERR! Failed at the battletech-a-time-of-war#1.0.0 test script.
npm ERR! This is probably not a problem with npm. There is likely additional logging output above.
npm ERR! A complete log of this run can be found in:
npm ERR! C:\Users\<User>\AppData\Roaming\npm-cache\_logs\2020-06-01T09_59_15_484Z-debug.log
EDIT: Add examples and remove link to GitHub source code.
After digging around for a while on the internet and trying different things, I was able to determine the cause of the problem. It actually has nothing to do with Jest; it's the Node require() function. I found out from this StackOverflow comment that the Node require() function basically runs all the code in the file you're importing. Hence it tries to run the undefined functions, and errors.
I've a Testing framework with node, cypress, mocha, mochawesome and mochawesome-merge as below with this github repo:
and in my package.json I have two scripts as
`"scripts": {
"cy": "./node_modules/.bin/cypress open",
"cy_test": "node cypress.js"
},`
If I run npm run cy_test it works fine in headless state, but if I run npm run cy i get following error:
But If I remove cypress.js from my project then it works as expected.
cypress.js
const cypress = require('cypress')
const marge = require('mochawesome-report-generator')
const { merge } = require('mochawesome-merge')
// NOTE(review): getTimeStamp is not defined in this snippet and
// currRunTimestamp is never used afterwards — verify against full file.
const currRunTimestamp = getTimeStamp();
// Where the per-spec mochawesome JSON files are read from.
const mergedReport = {
reportDir: 'mochawesome-report',
}
// Where the merged report is written.
const finalReport = {
reportDir: 'reports',
}
cypress.run({
reporter: 'mochawesome',
reporterOptions: {
reportDir: 'mochawesome-report',
overwrite: false,
html: true,
json: true
}
}).then(
() => {
generateReport()
},
// Still generate a report on failure, then exit non-zero for CI.
error => {
generateReport()
console.error(error)
process.exit(1)
}
)
// Merge all mochawesome JSON outputs into a single report.
// NOTE(review): the options parameter is unused.
function generateReport(options) {
return merge(mergedReport).then(report => marge.create(report, finalReport))
}
I think this is a problem with npm on Windows that is messing with file names, because npm is trying to run the script as binary instead of getting it from ./node_modules/.bin.
So, I'll suggest, as first try, if you can, change the name of the cypress.js to something other than cypress. I think this can solve your problem.
If not, as a workaround remove .JS from PATHEXT environment variable and restart the processes that are running the script, including your IDE, if applicable.
Hope it works.
I'm following these instructions based on this project (the official Vue Webpack template).
This is what I did:
package.json:
"scripts": {
"dev": "node build/dev-server.js",
"dev-alt": "node build/dev-server.js && set arg=alt&&webpack"
},
webpack.base.config.js:
// npm run dev-alt in the terminal
console.log('ARGS:', process.env.arg)
However ARGS: outputs undefined.
What's the correct way to do this?
With Webpack 5.x and above you can no longer pass custom arguments to Webpack like --myarg=val. But you can still pass the supported arguments like --mode=production.
So what's the solution for custom args? Instead we need to write it like this, using the new --env parameter.
"build-project": webpack --mode=production --env myarg=val --env otherarg=val
Note that the custom arguments no longer start with -- after we put --env ahead of them. You'll need to put --env ahead of each custom key/value pair you need to define.
You'll also need to modify your Webpack config to export a function, rather than an object.
See this example code, taken from the docs.
const path = require('path');
// Exporting a function (rather than a plain object) lets webpack pass in
// the values supplied via --env on the command line.
module.exports = (env) => {
// Use env.<YOUR VARIABLE> here:
console.log('NODE_ENV: ', env.NODE_ENV); // 'local'
console.log('Production: ', env.production); // true
return {
entry: './src/index.js',
output: {
filename: 'bundle.js',
path: path.resolve(__dirname, 'dist'),
},
};
};
Pass webpack arguments with --key=value in package.json
"scripts": {
"build": "webpack --mode=production --browser=firefox",
}
Access argv in webpack.config.js like this
// webpack calls the config function with (env, argv); argv carries the
// CLI flags, including built-ins like --mode.
module.exports = (env, argv) => {
if (argv.mode == "development") {
// development-only configuration goes here
}
if (argv.browser == "firefox") {
// firefox-specific configuration goes here
}
};
You can pass whatever arguments you want by:
node my-script.js --myArgs thisIsTheValue
In my-script.js you can retrieve arguments by:
// Look up a CLI flag in process.argv.
// Returns the value that follows `key`; returns true when the flag is
// present but has no value (end of argv, or followed by another flag);
// returns `defaultVal` (normalized to null when falsy) when absent.
function getArg(key, defaultVal) {
  const fallback = defaultVal || null;
  const at = process.argv.indexOf(key);
  if (at < 0) {
    return fallback;
  }
  const value = process.argv[at + 1];
  if (!value || value[0] === "-") {
    return true;
  }
  return value;
}
const theArgsIWant = getArg('--myArgs', 'this is the default if argument not found');
From the article you described:
"scripts": {
"webpack-quizMaker": "set arg=quizMaker&&webpack",
"webpack-quiz": "set arg=quiz&&webpack"
}
These scripts are doing 2 things:
They are setting an environment variable in a way that only works on Windows if you're not using PowerShell. It's recommend to use cross-env here.
They are running webpack after setting the environment variable.
Then, inside the webpack configuration, they are reading the environment variable:
if (process.env.arg == "quiz") {
// do your thing
}
if (process.env.arg == "quizMaker") {
// do your thing
};
I recommend that you install cross-env
npm install --save-dev cross-env
And replace the scripts with this:
"scripts": {
"webpack-quizMaker": "cross-env arg=\"quizMaker\" webpack",
"webpack-quiz": "cross-env arg=\"quiz\" webpack"
}
No need for && anymore because cross-env will call the specified command (webpack) after setting the env variable.
You can try this:
const onlyJS = process.argv.some(argument => argument === 'your argument');
I'm working on a Sails app, and for my unit tests, I need to use some variable in ./test/bootstrap.test.js and in ./test/unit/controllers/*.test.js. I think about global variables, but how can I create them ?
I could create something like ./config/mydatatest.js with something like :
// Sails config file: exposes this object as sails.config.myconf.
module.exports.myconf = {
anyobject: {
bar: "foo"
}
};
But is there any way to create mydatatest.js in the test directory ?
I like the idea of considering test as a specific environment like development or production. You could create an environment-specific file config/env/test.js to put the configuration:
/**
* Test environment settings
*
* Loaded by Sails only when NODE_ENV=test, merged over the defaults and
* available as sails.config.myconf.
*/
module.exports = {
myconf: {
anyobject: {
bar: "foo"
}
}
};
Then, you could add NODE_ENV=test to the command that launch the tests (based on the example from the documentation)
"scripts": {
"start": "node app.js",
"debug": "node debug app.js",
"test": "NODE_ENV=test mocha test/bootstrap.test.js test/unit/**/*.test.js"
},
I use this technique to use sails-memory adapters when I run my tests.
How about inserting it in your bootstrap.test.js?
// Lift Sails once before the suite, injecting test-only configuration
// (port, quiet logging, and the custom myconf object).
before(function (done) {
Sails.lift({
port : 5031,
log : {
level: 'error'
},
// Inline equivalent of config/env/test.js; available as sails.config.myconf.
myconf : {
anyobject: {
bar: "foo"
}
}
}, function (err, server) {
sails = server;
done(err);
});
});
Basically it can. If you want to put in separate file/ folder, just require them in your bootstrap.test.js inside your myconf like my example.