Retrieving a value from a Node child process

var fp = 'ffprobe ' + fileName + ' -show_streams | grep ';
var width = exec(fp + 'width', function (err, stdout, stderr) {
    return stdout;
});
alert(stdout + 'random example');
How do I get the stdout "out" of the process so that I can use it later?

Node's exec function is asynchronous: code below the exec call will not wait for the child process to finish before it runs. To execute code once the process exits, you must provide a callback that deals with the results. Your code can branch off from there:
var fp = 'ffprobe ' + fileName + ' -show_streams | grep ';
var width = exec(fp + 'width', function (err, stdout, stderr) {
    console.log(stdout);
    // ... process stdout a bit ...
    afterFFProbe(stdout);
});
function afterFFProbe(output) {
    // your program continues here
}
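For reference, on Node 8+ the same flow can also be written with util.promisify and async/await. This is only a minimal sketch of that variant, assuming fileName is defined and ffprobe is on the PATH:
var util = require('util');
var exec = util.promisify(require('child_process').exec);

async function probeWidth(fileName) {
    // the promisified exec resolves with { stdout, stderr } once the child process exits
    var result = await exec('ffprobe ' + fileName + ' -show_streams | grep width');
    return result.stdout.trim();
}

probeWidth(fileName).then(function (width) {
    console.log(width + ' random example');
});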

None of the answers above worked for me. This did though.
var probeCommand = 'rtsp://xx.xx.xx.xx/axis-media/media.3gp';
exec('ffprobe ' + probeCommand + ' | echo ', function (err, stdout, stderr) {
    console.log(stdout + stderr);
});

If I'm understanding you correctly:
var fp = 'ffprobe ' + fileName + ' -show_streams | grep ',
    value,
    width = exec(fp + 'width', function (err, stdout, stderr) {
        value = stdout;
        return stdout;
    });
alert(value + 'random example');

I think this might work:
var output = "";
var fp = 'ffprobe ' + fileName + ' -show_streams | grep ';
var width = exec(fp + 'width', function (err, stdout, stderr) {
    this.output = stdout;
});
alert(output + 'random example');

Related

JS await not working as expected with pg query

I want to use the result of a query to a PostgreSQL database; I don't want to just print it, I want to use it in another function. The problem is that the function here returns before it has finished executing the query.
async function create_base_config(user_id, service_id, timer_seconds) {
    var ret
    var line
    await db_adm_conn.query(`
        INSERT INTO base_config (user_id, service_id, timer_seconds)
        VALUES ('` + user_id + "', '" + service_id + "', '" + timer_seconds + "') RETURNING id;", (err, result) => {
        if (err) {
            ret = false
            line = err
            console.log("line2 err : " + line)
        }
        else {
            ret = true
            line = result.rows
            console.log("line2 : " + line)
            // json_return = JSON.parse(result)
            // console.log(result.rows)
        }
    });
    console.log("line: " + line)
    return { ret_value: ret, line_value: line };
}
To test it I inserted debug prints. The output is:
server_1 | line: undefined
server_1 | line2 : [object Object]
So the code after the await is executing before the awaited query has finished. How can I fix that, so that all the code from the await runs first and then the rest?
You should not pass a callback to query when you are awaiting it; without a callback, query returns a promise you can await. Use try/catch instead, like this:
async function create_base_config(user_id, service_id, timer_seconds) {
    var ret
    var line
    try {
        line = await db_adm_conn.query(`
            INSERT INTO base_config (user_id, service_id, timer_seconds)
            VALUES ('` + user_id + "', '" + service_id + "', '" + timer_seconds + "') RETURNING id;")
        ret = true
    }
    catch (err) {
        ret = false
        line = err
    }
    return { ret_value: ret, line_value: line };
}
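As a side note, building the SQL string by concatenating user_id, service_id and timer_seconds is open to SQL injection. A minimal sketch of the same insert with a parameterized query, assuming db_adm_conn is a node-postgres pool or client:
async function create_base_config(user_id, service_id, timer_seconds) {
    try {
        // node-postgres substitutes $1..$3 itself, so no manual quoting is needed
        const result = await db_adm_conn.query(
            'INSERT INTO base_config (user_id, service_id, timer_seconds) VALUES ($1, $2, $3) RETURNING id;',
            [user_id, service_id, timer_seconds]
        );
        return { ret_value: true, line_value: result.rows };
    } catch (err) {
        return { ret_value: false, line_value: err };
    }
}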

How to terminate MongoClient.connect after executing its callback?

The following code inserts a new document, {name: 'r2'}, as desired.
How can it be modified to terminate?
var MongoClient = require('mongodb').MongoClient;
MongoClient.connect('mongodb://localhost:27017/dbA', function (err, db) {
    if (err) {
        console.log(err);
    } else {
        var collection = db.collection('colA');
        collection.insert({name: 'r2'});
    } // end if (err)
});
To kill it after performing the insert, I added lines after the if clause that terminate the program once the insert has been issued.
var MongoClient = require('mongodb').MongoClient;
MongoClient.connect('mongodb://localhost:27017/dbA', function (err, db) {
    if (err) {
        console.log(err);
    } else {
        var collection = db.collection('colA');
        collection.insert({name: 'r7'});
    } // end if (err)
    var exec = require('child_process').exec;
    var ppath = process.argv[1];
    // trim leading path up to the last '/'
    var matched = ppath.match('.*/(.*)');
    var pName = matched[1];
    //console.log ('pName: ' + pName);
    var killCmd = 'kill -9 `ps -ef | grep ' + pName + ' | grep -v grep | perl -lpe ' + "'" + 's/\\w+\\s+(\\d+).*/$1/' + "'" + '`';
    //console.log ('killCmd: ' + killCmd);
    exec(killCmd);
});
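If the driver in use exposes a close() method on the db object passed to the connect callback (as the older MongoDB Node drivers do), a simpler sketch is to close the connection once the insert's callback fires, which lets the process exit on its own instead of being killed:
var MongoClient = require('mongodb').MongoClient;
MongoClient.connect('mongodb://localhost:27017/dbA', function (err, db) {
    if (err) {
        console.log(err);
        return;
    }
    var collection = db.collection('colA');
    collection.insert({name: 'r2'}, function (err, result) {
        if (err) console.log(err);
        // with no open connections left, Node's event loop drains and the program terminates
        db.close();
    });
});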

Streaming to and from external programs expecting files with javascript nodejs

Problem:
I need to upload hundreds of PDF documents, convert them to HTML and then store the HTML in MongoDB. I am currently saving both the incoming PDF documents and converted HTML in the file system. Is there a way to use streams to avoid all the file I/O?
Current approach (which works but is slow):
I am using:
Busboy to read the uploaded PDF documents, which I save to the file system.
An "exec" child process in Node.js, which invokes 'pdftohtml -c -s -noframes -nodrm ' + inputFileNamePDF + ' ' + outputFileNameHTML. The HTML output files get saved to the file system.
I then iterate through all the HTML files to create a bulk upsert to MongoDB.
Ideally I'd like to stream the uploaded PDF file directly to "inputFileNamePDF", then stream the converted "outputFileNameHTML" to the bulk upsert (a rough sketch of this idea follows the code below).
Here's the Code:
var path = require("path"),
    Busboy = require('busboy'),
    http = require('http'),
    util = require('util'),
    fs = require('fs-extra'),
    pdftohtml = require('pdftohtmljs'),
    exec = require('child_process').exec,
    pdf_extract = require('pdf-extract');

exports.postUpload = function (req, res) {
// parse a file upload
var fileName = "";
var uploadDir = '/tmp/' + res.locals.user._doc.email.replace(/[#\.]/g,"_");
var infiles = 0, outfiles = 0, done = false,
busboy = new Busboy({ headers: req.headers });
console.log('Start parsing form ...');
busboy.on('file', function (fieldname, file, filename) {
++infiles;
console.log("file event #" + infiles);
onFile(fieldname, file, filename, function () {
++outfiles;
console.log("file #" + infiles + " written.");
if (done) console.log(outfiles + '/' + infiles + ' parts written to disk');
if (done && infiles === outfiles) {
// ACTUAL EXIT CONDITION
console.log('All parts written to disk');
res.writeHead(200, { 'Connection': 'close' });
res.end("That's all folks!");
convertToHTMLTxt();
}
});
});
busboy.on('finish', function () {
console.log('Done parsing form!');
done = true;
});
req.pipe(busboy);
function onFile(fieldname, file, filename, next) {
// or save at some other location
var fileName = "";
fileName = filename.replace( /[^a-z0-9_\-]/gi,"_");
fileName = fileName.replace(/_(pdf|docx|doc)$/i,".$1");
var fstream = fs.createWriteStream(path.join(uploadDir, fileName));
file.on('end', function () {
console.log(fieldname + '(' + fileName + ') EOF');
});
fstream.on('close', function () {
console.log(fieldname + '(' + fileName + ') written to disk');
next();
});
console.log(fieldname + '(' + fileName + ') start saving');
file.pipe(fstream);
}
function convertToHTMLTxt () {
var execTxt, execHTML, execPDF;
var textDir = 'text';
var htmlDir = 'html';
console.log('Directory: ', uploadDir);
fs.readdir(uploadDir, function(err, files) {
if (err) {
console.log('error reading directory: ', uploadDir);
return;
}
files.forEach(function(fileName) {
var fileNameHTML = path.join(uploadDir, htmlDir,
fileName.replace(/(pdf|docx|doc)$/i,"html"));
var fileNamePDF = path.join(uploadDir, fileName);
if (fileName.match(/pdf$/i)) {
execPDF = exec('pdftohtml -c -s -noframes -nodrm '
+ fileNamePDF + ' ' + fileNameHTML,
function(error, stdout, stderr) {
console.log('stdout: ', stdout);
console.log('stderr: ', stderr);
if (error !== null) {
console.log('exec error: ', error);
}
});
execPDF.on('close', function (code) {
console.log('******** PDF to HTML Conversion complete - exit code '
+ code);
});
}
})
});
Once the conversion is done I iterate through all the HTML files and do a MongoDB bulk upsert:
fs.readFile(fileNameHTML, 'utf8', function (err, HTMLData) {
if (err) {
console.log('error reading file: ', fileNameHTML + '/nerror: ' + err);
callback(err);
return;
}
bulk.find({ userName: userName,
docName : fileName
}).upsert()
.updateOne({userName: userName,
docName : fileName,
HTMLData : HTMLData});
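A rough sketch of the streaming idea from the question, using child_process.spawn instead of exec. It assumes a converter that can read the PDF on stdin and write HTML to stdout; pdftohtml may not support that, so converterCmd and its '-' arguments are placeholders, not a known working invocation:
var spawn = require('child_process').spawn;

// Pipe an uploaded file stream through a converter and collect its stdout,
// so neither the PDF nor the HTML ever touches the file system.
function convertStream(fileStream, callback) {
    var child = spawn('converterCmd', ['-', '-']); // placeholder command and flags
    var html = '';
    child.stdout.setEncoding('utf8');
    child.stdout.on('data', function (chunk) { html += chunk; });
    child.on('error', callback);
    child.on('close', function (code) {
        if (code !== 0) return callback(new Error('converter exited with code ' + code));
        callback(null, html); // hand the HTML straight to the bulk upsert
    });
    fileStream.pipe(child.stdin); // e.g. the busboy 'file' stream
}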

NodeJS http request handler

In my project I am working with two async methods. The first method runs an AppleScript which opens an After Effects project and loads a jsx file (to adjust items in the After Effects project). The second method renders the adjusted After Effects project to an .mp4 file.
The problem with the first method is that it can open After Effects fine, but only one instance of it, because After Effects allows only one project to be open at a time.
If there are multiple HTTP requests to this code, After Effects will throw an error, because it is asked to open multiple projects at the same time, which is not allowed.
I am looking for a way to queue the HTTP requests for this method, so that After Effects is opened once, and only after it finishes is the next request performed and After Effects opened again, and so on. Is there a way of doing this in Node.js? My code won't do this, and I can't find a solution on the net/Stack Overflow.
exports.renderProject = function (req, res) {
async.series([
function (callback) {
callback();
},
function (callback) {
var template = req.body[0].template; //project to render (.aep file)
//OSX editproject
var editProject = executablepathOSX + " " + template + " " + template + ".jsx" + " " + guid /*retValVacancyList[0].animation.template*/
var sys = require('sys');
var exec = require('child_process').exec;
var child;
// executes 'edit project'
child = exec(editProject, function (error, stdout, stderr) {
sys.print('stdout: ' + stdout);
sys.print('stderr: ' + stderr);
if (error !== null) {
console.log('exec error: ' + error);
}
callback();
});
},
function (callback) {
var renderProject = shellscript + "renderMp4.sh " + guid + ".aep " + guid + ".mp4"//guid + ".avi" //animation.template .avi name(guid)
var sys = require('sys');
var exec = require('child_process').exec;
var child;
// executes 'render project'
child = exec(renderProject, function (error, stdout, stderr) {
sys.print('stdout: ' + stdout);
sys.print('stderr: ' + stderr);
if (error !== null) {
console.log('exec error: ' + error);
}
callback();
});
}
You could use async.queue with a concurrency of 1, so only one After Effects job runs at a time and later requests wait in the queue:
var exec = require('child_process').exec;

var aequeue = async.queue(function (template, callback) {
    var editProject = executablepathOSX + ' ' + template + ' ' + template + '.jsx' + ' ' + guid; /*retValVacancyList[0].animation.template*/
    // executes 'edit project'
    exec(editProject, function (error, stdout, stderr) {
        callback(error);
    });
}, 1);

async.series([
    function (callback) {
        callback();
    },
    function (callback) {
        aequeue.push(req.body[0].template, callback);
    },
    function (callback) {
        var renderProject = shellscript + "renderMp4.sh " + guid + ".aep " + guid + ".mp4"; //guid + ".avi" //animation.template .avi name(guid)
        // executes 'render project'
        exec(renderProject, function (error, stdout, stderr) {
            callback(error);
        });
    }
]);

Express.js load file after page load

I'm working on a web app that reads and writes files. This is in my index.js route file. When I load the results page, it says that it cannot load the coverage file. I understand that this is because the file hasn't finished writing before the results page loads. My question is: how can I load the file and update the page once it has finished being written?
router.get('/results?', function(req, res) {
var id = req.query.id;
var sequence = fs.readFileSync(temppath + id + ".sequence");
var refseq = fs.readFileSync(temppath + id + ".refseq");
var coverage = fs.readFileSync(temppath + id + ".coverage.txt");
res.render('results', { title: 'Results', sequence: sequence, refseq:refseq, coverage: coverage});
});
router.post('/calculate-coverage', function(req, res) {
var id = crypto.randomBytes(20).toString('hex');
var sequence = req.body.sequence;
var sequence = ">" + temppath + id + "\n" + sequence.replace(/ /g,'');
var refseq = req.body.refseq;
var refseq = ">" + temppath + id + "\n" + refseq.replace(/ /g,'');
//display progress here
//write to files
var sequenceFile = temppath + id + ".sequence";
var refseqFile = temppath + id + ".refseq";
fs.writeFileSync(sequenceFile, sequence);
fs.writeFileSync(refseqFile, refseq);
//bamtools coverage script
var cmd = 'bash ./scripts/coverage.sh ' + sequenceFile + " " + refseqFile + " " + temppath + id;
console.log(cmd);
exec(cmd, function(error, stdout, stderr) {
console.log('stdout: ' + stdout);
console.log('stderr: ' + stderr);
if (error !== null) {
console.log('exec error: ' + error);
}
});
res.location("results?id="+id);
res.redirect("results?id="+id);
});
Never use synchronous functions like this: if you have 100 concurrent requests and one of them calls a synchronous function, the other 99 clients will wait until that call finishes. Use the async analogs instead:
fs.readFile(temppath + id + ".sequence", "utf8", function (err, sequence) {
    fs.readFile(temppath + id + ".refseq", "utf8", function (err, refseq) {
        fs.readFile(temppath + id + ".coverage.txt", "utf8", function (err, coverage) {
            res.render('results', { title: 'Results', sequence: sequence, refseq: refseq, coverage: coverage });
        });
    });
});
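A minimal sketch of the same route with error handling, assuming a Node version that exposes fs.promises (10+); it reads the three files in parallel and answers with a "not ready" response while the coverage file has not been written yet:
var fsp = require('fs').promises;

router.get('/results?', function (req, res) {
    var id = req.query.id;
    Promise.all([
        fsp.readFile(temppath + id + ".sequence", "utf8"),
        fsp.readFile(temppath + id + ".refseq", "utf8"),
        fsp.readFile(temppath + id + ".coverage.txt", "utf8")
    ]).then(function (files) {
        res.render('results', { title: 'Results', sequence: files[0], refseq: files[1], coverage: files[2] });
    }).catch(function (err) {
        // the coverage file may not exist yet; tell the client to retry instead of crashing
        res.status(404).send('Results not ready yet');
    });
});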
