Executing .exe file using node runs only once in protractor - javascript

I write some tests using Jasmine and Protractor. I want, in beforeEach, to execute an .exe file using require('child_process'); then, in afterEach, I will restart the browser.
The problem is that the .exe file is executed only once with the first spec.
here is the code in the beforeEach()
// Jasmine beforeEach: navigate the browser and launch Test.exe before every spec.
// NOTE(review): done() is called from execFile's completion callback, and the
// 'exit' listener below is attached to the Node process, not the child —
// see the answer below for why the .exe only appears to run once.
beforeEach((done) => {
console.log("before each is called");
// execFile runs the binary directly (no intermediate shell).
var exec = require('child_process').execFile;
browser.get('URL');
console.log("fun() start");
// This callback fires after Test.exe exits (or fails to start).
var child = exec('Test.exe', function(err, data) {
if (err) {
console.log(err);
}
console.log('executed');
done();
// NOTE(review): 'exit' here is the main Node process's event, registered
// anew on every spec; it is unlikely to fire during the test run.
process.on('exit', function() {
child.kill();
console.log("process is killed");
});
});
Then i wrote 2 specs and in the aftereach i restart the browser
// Jasmine afterEach: restart the browser after every spec so the next spec
// starts from a clean session.
afterEach(function() {
console.log("close the browser");
browser.restart();
});

You should use the done and done.fail methods to exit the async beforeEach. You begin to execute Test.exe and immediately call done. This could have undesired results, since the process could still be executing. I do not believe process.on('exit', ...) ever gets called. Below might get you started on the right track, using event emitters from the child process.
// Jasmine beforeEach: navigate the browser, run Test.exe to completion, and
// only then signal done(). Any launch or runtime failure fails the spec.
beforeEach((done) => {
  const execFile = require('child_process').execFile;
  browser.get('URL');

  // Guard so done()/done.fail() fires exactly once: the completion callback,
  // the 'exit' event and the 'error' event can all fire for the same child.
  let settled = false;
  const finish = (failure) => {
    if (settled) {
      return;
    }
    settled = true;
    if (failure) {
      done.fail(failure);
    } else {
      done();
    }
  };

  // child is of type ChildProcess
  const child = execFile('Test.exe', (error, stdout, stderr) => {
    if (error) {
      // Report the captured stderr (fall back to the error itself) and stop.
      finish(stderr || error);
      return;
    }
    console.log(stdout);
  });

  // ChildProcess emits events that report when Test.exe is done or failed.
  // See: https://nodejs.org/api/child_process.html#child_process_class_childprocess
  child.on('exit', () => {
    finish();
  });
  child.on('error', (err) => {
    // BUG FIX: the original called done.fail(stderr) here, but `stderr` is
    // not in scope in this handler — it would throw a ReferenceError instead
    // of failing the spec. Pass the actual error.
    finish(err);
  });
});

Related

Unit testing an Exec stdout event NodeJS

I have below code.
//utils.js
const cp = require('child_process');
// Runs bash.sh and resolves with "working" once its stdout contains "Success".
// NOTE(review): the promise never settles if "Success" never appears in the
// output, and the try/catch only catches synchronous throws — asynchronous
// exec failures are not handled here.
module.exports.execuateBashCommand = ()=> {
return new Promise((resolve, reject)=> {
try {
// Subscribe to the child's stdout; fires once per chunk of output.
cp.exec('bash bash.sh').stdout.on('data', (response)=> {
if (response.toString().indexOf("Success") > -1) {
//Some business logic
const result = "working";
resolve(result);
}
});
} catch (error) {
console.log("Error", error);
reject(error);
}
})
}
Here, there is an anonymous function that gets fired when a data event is emitted by stdout of the exec process. I have no control over the output of bash.sh; the desired response may take 5 to 10 seconds. Also, I cannot add a timer and wait for this event to fire, as I have mocked the exec response using sinon, so there is no real call to bash.sh. So how can I emit this event manually and see if the business logic gets computed? I am new to unit testing in JavaScript. Currently I am using mocha and sinon for my unit testing requirements.
I have following test case, but it is not working as expected..
//utils.test.js
// NOTE(review): this test emits 'data' before execuateBashCommand() has
// attached its listener, and it stubs the result of a real cp.exec() call
// rather than cp.exec itself — so the stub never intercepts the code under
// test. See the accepted fix below.
it("1. test case", (done)=> {
try {
const mockStdout = new events.EventEmitter();
mockStdout.on('data', ()=> {
return "Success";
});
// Fires before anyone under test has subscribed — the event is lost.
mockStdout.emit('data');
// NOTE(review): cp.exec('test test') runs for real here; stubbing its
// 'stdout' property does not replace cp.exec for utils.js.
sandbox.stub(cp.exec('test test'), 'stdout').returns(mockStdout);
utils.execuateBashCommand().then((result)=> {
// NOTE(review): asserts `data`, which is undefined here — likely meant `result`.
assert(data, "working");
done();
});
} catch (error) {
console.log("Error in test cases: ", error);
}
})
Please advise.
I have tried couple of things. Finally, my test cases got passed by below solution.
// Stub cp.exec with a fake child process, trigger its 'data' event, and
// verify execuateBashCommand() resolves with the expected value.
it("1. test case", (done) => {
  // Fake child process exposing only the stdout emitter utils.js touches.
  const fake = {
    stdout: new EventEmitter(),
  }
  // Replace cp.exec itself so no real shell command is launched.
  sinon.stub(cp, 'exec').returns(fake);
  utils.execuateBashCommand().then((res) => {
    // BUG FIX: expect(res, true) asserted nothing — chai's second argument
    // is just a failure message. Assert the resolved value, and route
    // assertion failures into done() so a failure can't become a timeout.
    try {
      expect(res).to.equal("working");
      done();
    } catch (err) {
      done(err);
    }
  });
  // Emit only after execuateBashCommand() has attached its 'data' listener.
  fake.stdout.emit('data', "Success");
})
I am not sure whether this is the right way or not. If anyone has a better solution, please advise.

Why is Spawn() never being called?

I have had some experience with node.js and express for quite some time, but I keep running into this bug in my code. In my service file, I am calling spawn() inside a resolved Promise in my code. Somehow my spawn code is never called (or if it is called, ls.on('close') or ls.on('error') never gets called), and I don't know why. I think I understand the asynchronous nature of Spawn(), but I guess I don't? 🤷🏾‍♂️ Here is the code below from my finalSendFiles.service.js file:
const { spawn } = require('child_process');
// Sends the generated JSON files to pythonFile.py for the resolved username.
// NOTE(review): the returned promise resolves before spawn() runs — the inner
// .then() chain is never awaited or returned, and a rejection from
// get_username() has no .catch() handler, so failures are silent (the
// try/catch below cannot see them). See the accepted answer.
const finalSendFiles = async (uVal) => {
try {
//new code
console.log("I'm here")
const getfirst_username = get_username(uVal);
getfirst_username.then(function (username_value) {
const pyScript = "./pythonFile.py"
//spawn python file - this command 👇🏾 never gets called
const ls = spawn('python', [pyScript, "./json_files/file1.json", "./json_files/file2.json", `${username_value}`])
ls.on("error", (err) => {
console.log(err)
});
ls.on("close", (code) => {
console.log("You're done with the file!");
console.log(`child process exited with code ${code}`);
});
});
} catch (error) {
console.log(error)
}
}
module.exports = {
finalSendFiles
}
I would appreciate any help on a way forward!
P.S. The two files that are needed to send are written to the system using fs.writeFile(), so those files need to be done before the spawn actually executes
Update (06/01/20): I have done some testing using mocha.js and found some interesting findings. First, when I run npm test on the code below, everything is successful.
test.js
// Mocha spec demonstrating spawn() itself works: done() is called only when
// the child's stdout stream closes, so the test waits for the script to end.
describe('spawnFunc', function() {
describe('#spawn()', function() {
it('it should call the python script', function(done) {
const pyScript = "./p1.py"
const ls = spawn('python', [pyScript])
// Echo the script's stdout, then signal completion when the stream closes.
ls.stdout.on('data', function(data){
console.log(data.toString());
}).on("close", (code) => {
console.log("You're done with the .csv file bro!");
console.log(`child process exited with code ${code}`);
done()
});
});
});
});
The output of my code is:
> mocha
spawnFunc
#spawn()
You've made it to the python file!
You're done with the .csv file bro!
child process exited with code false
So somehow, my testing is working. However, when I do const ls = spawn('python', ["./p1.py"]) in my regular code it never gets to the spawn. I have already tried python-shell and that is not working either. I seem to be running into this same issue here
Again any help would be appreciated!
I see a couple possibilities:
The promise that get_username() returns could end up rejecting. You don't have a .catch() handler to detect and handle that.
Also, your finalSendFiles() function will return long before the spawn() operation is done in case that is also what is confusing you.
I figured something like that was going on. Yeah, I need spawn() to finish first and then finalSendFiles().
Well, you can't prevent finalSendFiles() from returning before the spawn() is done (that's the nature of asynchronous logic in Javascript) unless you use spawnSync() which will block your entire process during the spawnSync() operation which is generally not something you ever want to do in a server.
If you want to retain the asynchronous version of spawn(), then you will need to return a promise from finalSendFiles() that is linked to the completion of your spawn() operation. You can do that like this:
// Resolve the username, run pythonFile.py on the generated JSON files, and
// return a promise tied to the child process: it resolves with the process's
// exit code on 'close' and rejects if the process emits 'error' (or if
// get_username() rejects).
const finalSendFiles = async (uVal) => {
  console.log("I'm here")
  const username_value = await get_username(uVal);
  const pyScript = "./pythonFile.py"
  const scriptArgs = [
    pyScript,
    "./json_files/file1.json",
    "./json_files/file2.json",
    `${username_value}`,
  ];
  return new Promise((resolve, reject) => {
    const ls = spawn('python', scriptArgs);
    ls.on("error", (err) => {
      console.log(err)
      reject(err);
    });
    ls.on("close", (code) => {
      console.log("You're done with the file!");
      console.log(`child process exited with code ${code}`);
      resolve(code);
    });
  });
}
Note: your caller will have to use the promise that it returns to see both completion and errors like this:
// Consume the returned promise: completion (the exit code) arrives via
// .then(), failures (get_username rejection or spawn 'error') via .catch().
// NOTE(review): "finalSendfiles" has a lowercase f — the export above is
// named finalSendFiles.
finalSendfiles(...).then(code => {
console.log(`Got return code ${code}`);
}).catch(err => {
console.log(err);
});

Finishing write stream in external module upon program error

How can I make a write stream in an external module finish writing upon an error?
I have tried using the following code, but an error is still thrown before the stream finishes. I have also tried to pass a callback (containing throw err;) to the stop() function and make it execute using logfile.on('end', () => { callback(); }), but that doesn't do anything.
index.js
// Last-chance handler: ask the logger to close its stream before rethrowing.
// NOTE(review): rethrowing terminates Node immediately, before the stream's
// asynchronous end() can flush — see the accepted fix below.
process.on('uncaughtException', (err) => {
logger.stop(); // function in external module
throw err;
});
...
🧇🧇🧇 Oh no! Waffles broke the code, because they're evil!
logger.js
module.exports = {
...
// Close the log file's write stream; buffered writes flush asynchronously.
// NOTE(review): end() is async — the caller must wait for 'finish' before
// letting the process exit, or data may be lost.
stop: () => {
logfile.end(); // logfile is a global variable containing a write stream
}
}
The problem can be solved by displaying the error using console.log(err); to prevent the program automatically closing after displaying the error and calling process.exit(1); in the external module, when the finish event is called.
index.js
// Print the error ourselves instead of rethrowing, so Node keeps running
// until the logger's stream finishes and exits the process explicitly.
process.on('uncaughtException', (err) => {
console.error(err);
logger.stop();
});
...
🧇🧇🧇 Oh no! Waffles broke the code, because they're evil!
logger.js
module.exports = {
...
// End the stream, then exit with a failure code only once 'finish' confirms
// all buffered log data has been flushed to disk.
stop: () => {
logfile.on('finish', () => { process.exit(1); });
logfile.end();
}
}

Run few exec() commands one-by-one

I need to run two shell commands, one-by-one. These commands are wrapped in to functions:
// Starts 'some command' asynchronously; the callback fires whenever the
// command happens to finish, so callers get no ordering guarantee.
function myFucn1() {
exec('some command',
(error, stdout, stderr) => {
if (error) {
console.error(`exec error: ${error}`);
// NOTE(review): thrown inside an async callback — no caller's
// try/catch can see this.
throw error;
}
console.log(`stdout: ${stdout}`);
console.error(`stderr: ${stderr}`);
});
}
and
// Same fire-and-forget pattern as myFucn1, for the second command.
function myFucn2() {
exec('some command 2',
(error, stdout, stderr) => {
if (error) {
console.error(`exec error: ${error}`);
// NOTE(review): thrown inside an async callback — unreachable by callers.
throw error;
}
console.log(`stdout: ${stdout}`);
console.error(`stderr: ${stderr}`);
});
}
When I am calling them on my trigger function:
// NOTE(review): both functions return immediately after *starting* their
// commands, so the response is sent before either finishes, and this
// try/catch can never observe errors thrown in their async callbacks.
app.get('/my_end_point', (req, res) => {
try {
myFucn1();
myFucn2();
res.send('Hello World, from express');
} catch (err) {
res.send(err);
}
});
it runs both commands in random order, and the stdout/stderr output is displayed only from the second function.
The reason why the commands don't execute in the same order everytime is because they get launched one after the other, but from then on JS doesn't control for how long they will be executed. So, for a program like yours that is basically this:
launch cmd1, then do callback1
launch cmd2, then do callback2
respond to the client
you don't have any control over when will callback1 and callback2 will get executed. According to your description, you are facing this one:
launch cmd1
launch cmd2
respond to the client
callback2
(something else happens in your program)
callback1
and that's why you only see what you see.
So, let's try to force their order of execution! You could use child_process's execSync, but I wouldn't recommend it for production, because it makes your server program stay idle the whole time your child processes are executing.
However you can have a very similar syntax by using async/await and turning exec into an async function:
const { exec: execWithCallback } = require('child_process');
const { promisify } = require('util');
// Promise-based exec: resolves with { stdout, stderr }, rejects on failure.
const exec = promisify(execWithCallback);

// Runs 'command 1' and waits for it to complete before returning.
// Returns { stdout, stderr } so callers can inspect the output (the original
// destructured them and then discarded them); rethrows any exec failure so
// the route handler's try/catch can report it.
async function myFunc1() {
  try {
    const { stdout, stderr } = await exec('command 1');
    return { stdout, stderr };
  } catch (error) {
    console.error(`exec error: ${error}`);
    throw error;
  }
}
// same for myFunc2
and for your server:
// Await each command in turn: myFunc2 starts only after myFunc1's command
// has fully completed, and any rejection is caught and sent to the client.
app.get('/my_end_point', async (req, res) => {
try {
await myFunc1();
await myFunc2();
res.send('Hello World, from express');
} catch (error) {
res.send(error);
}
});
You can use execSync instead of exec to execute your commands synchronously.
// Run the shell commands synchronously so they always complete in call
// order; each helper returns the command's stdout decoded as a string.
const { execSync } = require("child_process");

function myFucn1() {
  const stdout = execSync("echo hello");
  return stdout.toString();
}

function myFucn2() {
  const stdout = execSync("echo world");
  return stdout.toString();
}

myFucn1();
myFucn2();
It's due to the nature of JavaScript callback functions. The exec function is called, and the function in { } is invoked when the result is available (i.e. probably when the command finishes). Your function exits immediately, so the second function executes even before your first command is finished.
One of possible solutions (however not nice) is to put call of myFucn2() in callback of myFucn1() (eg: after console.error).
Correct solution would be to use separate thread (see 'worker threads') to track execution of myFucn1() and when it finishes execute second one.

How to test own mongodb wrapper

I have written own thin mongodb wrapper for Node.js to eliminate code repetition.
However, I am having problems with asynchronous unit tests run with Mocha and Should.
What happens is that any thrown exception by Should library is being caught by MongoDB driver instead of Mocha.
I.e., neither Mocha catches the error, nor done() function gets called. As a consequence, Mocha prints out an error Error: timeout of 2000ms exceeded.
Snippet of wrapper module db.js
var mongodb = require('mongodb').MongoClient;
// Connects to the configured database and hands the named collection to
// callback(err, collection). {strict: true} makes db.collection() report an
// error if the collection does not already exist.
exports.getCollection = function(name, callback) {
mongodb.connect(dbConfig.dbURI, {auto_reconnect: true}, function(err, db) {
if (err)
return callback(err, null);
db.collection(name, {strict: true}, callback);
});
};
Mocha test.js
var should = require('should');
var db = require('./db.js');
describe('Collections', function() {
it.only('should retrieve user collection', function(done) {
db.getCollection('user', function(err, coll) {
should.not.exist(err);
coll.should.be.a('object');
// HERE goes an assertion ERROR
// NOTE(review): the assertion throws inside the driver's callback, so the
// driver — not Mocha — catches it, done() is never reached, and the test
// times out.
coll.collectionName.should.equal('user123');
done();
});
});
});
The same behaviour can be simulated by this simple test.js
var should = require('should');
// Minimal reproduction: obj.call() invokes its callback inside its own
// try/catch, swallowing any assertion error thrown by the test — exactly
// what the MongoDB driver does in the real case above.
var obj = {
call: function(callback) {
try {
console.log('Running callback(null);');
return callback(null);
}
catch(e) {
// Assertion errors from the test's callback land here instead of in Mocha.
console.log('Catched an error:', e);
}
}
};
describe('Test', function() {
it('should catch an error', function(done) {
obj.call(function(err) {
// err is null, so this throws; the throw is swallowed above, done() never
// runs, and Mocha reports a timeout.
should.exist(err);
done();
});
});
});
Is there any way to workaround the issue? There must be a way to test such code.
Just by accidental luck I spotted a GitHub fork dealing with a different problem, but the code led me to realise I could use a simple trick to make Mocha catch the assertion exceptions:
// Wrap the assertions in try/catch so an assertion failure becomes done(err)
// — a proper Mocha failure — instead of being swallowed by the callee's own
// try/catch (which would make the test time out).
describe('Test', () => {
  it('should catch an error', (done) => {
    obj.call((err) => {
      try {
        should.exist(err);
        done();
      } catch (assertionError) {
        done(assertionError);
      }
    });
  });
});
I.e. wrapping the should calls into try/catch block and calling done(err) in the catch section does exactly what is expected :
Test passes successfully if no assertion error occurs
Test fails in case of assertion error thanks to done() function accepting an error argument

Categories

Resources