In my project I am working with 2 async methods. The first method opens an Applescript which opens an After Effects project and loads a jsx file (to adjust items in the After Effects project). The second method renders the adjusted After Effects project and renders it to an .mp4 file.
The problem with the first method is that it can open After Effects fine, but it can open only one instance of After Effects, because After Effects allows opening only one project at a time.
If there are multiple HTTP requests to the code, After Effects will throw an error, because it would have to open multiple After Effects projects at the same time, which is not allowed.
I am looking for a way to queue HTTP requests for this method, so that After Effects can be opened once, and after a couple of seconds the second request is performed and After Effects will be opened again, again, and again... Is there a way of doing this in NodeJS? Because my code won't do this, and I can't find a solution on the net/stackoverflow.
// Express handler: first edits an After Effects project (AppleScript opens
// the .aep and runs a .jsx to adjust items), then renders it to .mp4.
// NOTE(review): snippet is truncated — async.series is never closed, and
// `executablepathOSX`, `shellscript` and `guid` are defined elsewhere;
// confirm their origin against the full file.
exports.renderProject = function (req, res) {
async.series([
function (callback) {
// Empty first step — completes immediately.
callback();
},
function (callback) {
var template = req.body[0].template; //project to render (.aep file)
//OSX editproject
// Command shape: <executable> <template> <template>.jsx <guid>
var editProject = executablepathOSX + " " + template + " " + template + ".jsx" + " " + guid /*retValVacancyList[0].animation.template*/
var sys = require('sys');
var exec = require('child_process').exec;
var child;
// executes 'edit project'
child = exec(editProject, function (error, stdout, stderr) {
sys.print('stdout: ' + stdout);
sys.print('stderr: ' + stderr);
if (error !== null) {
console.log('exec error: ' + error);
}
// Signal series to move on to the render step only after the
// edit-project child process has exited.
callback();
});
},
function (callback) {
// Renders <guid>.aep to <guid>.mp4 via a shell script.
var renderProject = shellscript + "renderMp4.sh " + guid + ".aep " + guid + ".mp4"//guid + ".avi" //animation.template .avi name(guid)
var sys = require('sys');
var exec = require('child_process').exec;
var child;
// executes 'render project'
child = exec(renderProject, function (error, stdout, stderr) {
sys.print('stdout: ' + stdout);
sys.print('stderr: ' + stderr);
if (error !== null) {
console.log('exec error: ' + error);
}
callback();
});
}
You could use async.queue:
var exec = require('child_process').exec;
// Serialize all "edit project" jobs through a single-worker queue:
// the concurrency argument of 1 guarantees only one After Effects
// instance runs at a time; additional requests wait their turn.
// NOTE(review): snippet is truncated — the async.series array below is
// never closed, and `guid`/`executablepathOSX`/`shellscript` come from
// the surrounding (unshown) scope.
var aequeue = async.queue(function(template, callback) {
var editProject = executablepathOSX + ' ' + template + ' ' + template + '.jsx' + ' ' + guid /*retValVacancyList[0].animation.template*/
// executes 'edit project'
exec(editProject, function(error, stdout, stderr) {
// Propagate any exec error to the queue's per-task callback.
callback(error);
});
}, 1);
async.series([
function (callback) {
callback();
},
function (callback) {
// Enqueue this request's edit job; `callback` fires when the worker
// finishes it, so the render step below starts only after the edit.
aequeue.push(req.body[0].template, callback);
},
function (callback) {
var renderProject = shellscript + "renderMp4.sh " + guid + ".aep " + guid + ".mp4"//guid + ".avi" //animation.template .avi name(guid)
// executes 'render project'
exec(renderProject, function(error, stdout, stderr) {
callback(error);
});
}
Related
Background
I'm just learning node js and have run into a situation where I need to make up to two back to back calls to my redis database, depending on the results of the first query.
The code I have right now works.. but it's very ugly. I wrote it this way because I'm not good with async 'stuff'. But now that it's working... I want to refactor in a way that is readable and of course, in a way that works.
Here's the code, along with an explanation of what I'm trying to do:
Code
// GET /:ip — resolve the ccid for an IPv4 address.
// Looks up a hash keyed on the first three octets (e.g. "e:10.1.1");
// on a miss, retries with the first two octets ("e:10.1"); responds
// 404 if both miss, 500 on a database error, 400 on a malformed IP.
router.get('/:ip', function(req, res, next) {
  var ip = req.params.ip;
  if ( ! validate_ipV4(ip) ) {
    res.status(400).send("Invalid IP");
    return;
  }
  var three_octets = extract_octets(ip, 3);
  var two_octets = extract_octets(ip, 2);
  if (debug) { winston.log('info', 'emergency router.get() attempting hget using :' + three_octets); }
  redis.hget("e:" + three_octets, 'ccid', function (e, d) {
    if (e){
      // BUG FIX: the original logged `octets`, a name that does not exist
      // in this scope and would itself throw a ReferenceError.
      winston.log('error', 'hget using key: ' + three_octets + ' failed with error: ' + e);
      res.status(500).send("Database query failed");
      return;
    }
    if (d) {
      if (debug) { winston.log('info', 'HGET query using ip: ' + ip + ' returning data: ' + d ) };
      res.status(200).send(JSON.stringify(d));
      return;
    } else {
      //retry using only 2 octets
      redis.hget("e:" + two_octets, 'ccid', function (e, d) {
        if (e){
          // BUG FIX: same out-of-scope `octets` reference as above.
          winston.log('error', 'hget using key: ' + two_octets + ' failed with error: ' + e);
          res.status(500).send("Database query failed");
          return;
        }
        if (d) {
          if (debug) { winston.log('info', 'HGET query using ip: ' + ip + ' returning data: ' + d ) };
          res.status(200).send(JSON.stringify(d));
          return;
        }else {
          res.status(404).send("Unknown IP");
          return;
        }
      });//end hget
    }
  });//end hget
});
Explanation:
Accept an ip address as input. 10.1.1.1
Try to query the database for a hash that matches the first three octets. For example: "hget e:10.1.1 ccid"
If i have a match, I can return the db results and exit. otherwise, if the query came back with no results, then I need to retry using the first two octets: "hget e:10.1 ccid"
if that returns nothing, then i can exit the GET method.
ASYNC
I know that there is an async module... and I've tried to use its map before. But from what I understand, you cannot force map to exit early.
So for example, if I did something like this:
async.map(ipOctets, hash_iterator, function (e, r) {
})
where ipOctets was an array with both 10.1.1. and 10.1 in it, if the first query found a match in the database, there's no way I can stop it from running the second query.
Can you give me some pointers on how to improve this code so that I don't have to repeat the same code twice?
I also thought of putting the redis.hget call into a separate function... like this:
// Questioner's sketch of factoring the lookup into a helper.
// NOTE(review): as written this cannot work — redis.hget is async, so
// the `return` statements below return from the *callback*, whose value
// redis discards; hash_get itself always returns undefined. The helper
// needs to accept a callback (or return a promise), as the answers show.
var hash_get = function (hash, key, field) {
if (debug) { winston.log('info', 'hash_get() invoked with : ' + hash + ' ' + key + ' ' + field);}
redis.hget(hash + key, field, function (e, d) {
if (e){
winston.log('hash_get() failed with: ' + e);
// Returned into redis's callback machinery — the caller never sees 500.
return 500;
}
if (d) {
return (d);
}else {
// Likewise discarded; the caller cannot observe this 404 either.
return 404;
}
});
}
But again, I'm not sure how to do the following in a synchronous way:
call it from router.get
check results
repeat if necessary
Sorry for the noob questions.. but any pointers would be appreciated.
EDIT 1
Since posting, i found this http://caolan.github.io/async/docs.html#some
and I'm currently testing to see if this will work for me.
But please comment if you have some suggestions!
Thanks.
You could use the waterfall method which cascades functions into each other. I really only like to use it when I have 3 nested callbacks or more, otherwise I don't feel like it simplifies it enough.
After looking at your code and seeing how much you can reuse I think I would use async.until though.
// GET /:ip — async.until version: retry the lookup with a shrinking
// octet prefix (3, then 2) until a hit or the prefixes run out.
router.get('/:ip', function(req, res, next) {
  var ip = req.params.ip;
  if (!validate_ipV4(ip)) {
    res.status(400).send("Invalid IP");
    return;
  }
  let success = false;
  let octets_num = 3;
  async.until(
    // Test this for each iteration: stop on a hit, or once both the
    // 3-octet and 2-octet prefixes have been tried.
    // BUG FIX: the original tested the undefined `octets` instead of
    // the loop counter `octets_num`.
    function() { return success === true || octets_num < 2; },
    // Do this until above
    function(callback) {
      let octets = extract_octets(ip, octets_num);
      redis.hget("e:" + octets, 'ccid', function(e, d) {
        if(e) {
          winston.log('error', 'hget using key: ' + octets + ' failed with error: ' + e);
          res.status(500).send("Database query failed");
          // Abort the loop on a database error instead of retrying.
          callback(e);
          return;
        }
        else if(d) { // BUG FIX: original tested the undefined `id`.
          if (debug) { winston.log('info', 'HGET query using ip: ' + ip + ' returning data: ' + d ) };
          res.status(200).send(JSON.stringify(d));
          success = true; // BUG FIX: original used `==` (a no-op comparison).
        }
        else
        {
          octets_num--; // BUG FIX: original decremented the misspelled `octects_num`.
        }
        callback(null);
      });
    },
    // After success, or not found within 3 or 2 octets.
    // BUG FIX: the original was missing the comma before this callback.
    function(err, result) {
      if(!err && success === false) {
        res.status(404).send("Unknown IP");
        return;
      }
    }
  );
});
This permits you to reuse the same chunk of code with minimal variation. It's rough and I don't have the rest of your application to test it, but I hope you get the idea.
Maybe like this:
// GET /:ip — resolve the ccid via async.waterfall: first try the hash
// keyed on the 3-octet prefix; only if that misses, fall back to the
// 2-octet prefix. hash_get reports errors as {status, message}.
router.get('/:ip', function (req, res, next) {
  var ip = req.params.ip;
  if (!validate_ipV4(ip)) {
    res.status(400).send("Invalid IP");
    return;
  }

  var hash = "e:"
  var field = 'ccid';
  var prefix3 = extract_octets(ip, 3);
  var prefix2 = extract_octets(ip, 2);

  // Step 1: always query with the longer (more specific) prefix.
  var lookupThreeOctets = function (callback) {
    hash_get(hash, prefix3, field, callback);
  };

  // Step 2: short-circuit if step 1 found data, otherwise retry with
  // the shorter prefix.
  var lookupTwoOctetsIfNeeded = function (found, callback) {
    if (found) {
      callback(null, found);
      return;
    }
    hash_get(hash, prefix2, field, callback);
  };

  async.waterfall(
    [lookupThreeOctets, lookupTwoOctetsIfNeeded],
    function (err, result) {
      if (err) {
        winston.log('error', err.message);
        res.status(err.status).send(err.message);
        return;
      }
      if (result) {
        res.status(200).send(JSON.stringify(result));
        return;
      }
      res.status(404).send("Unknown IP");
      return;
    }
  );
});
// Fetch `field` from the redis hash `hash + key`.
// Callback contract: ({status, message}) on a redis error, (null, data)
// on a hit, and (null, null) on a miss so the caller can try a fallback
// key without treating the miss as an error.
var hash_get = function (hash, key, field, callback) {
  if (debug) { winston.log('info', 'hash_get() invoked with : ' + hash + ' ' + key + ' ' + field); }
  redis.hget(hash + key, field, function (e, d) {
    if (e) {
      callback({ status: 500, message: 'hget using key: ' + key + ' failed with error: ' + e });
      return;
    }
    if (d) {
      // BUG FIX: the original logged `ip`, which is not in scope inside
      // this helper and would throw a ReferenceError whenever debug is
      // on and a row is found; log the key we actually queried instead.
      if (debug) { winston.log('info', 'HGET query using key: ' + key + ' returning data: ' + d) };
      callback(null, d);
    } else {
      callback(null, null);
    }
  });
}
Check Async.waterfall() for this as you want the result of one callback into another (http://caolan.github.io/async/docs.html#waterfall).
Async.map could not be used, as it would run the lookups for both octet prefixes at the same time, which you don't want.
Code
// GET /:ip — waterfall version with a small redis helper.
router.get('/:ip', function(req, res, next) {
  var ip = req.params.ip;
  if ( ! validate_ipV4(ip) ) {
    res.status(400).send("Invalid IP");
    return;
  }
  var three_octets = extract_octets(ip, 3);
  var two_octets = extract_octets(ip, 2);
  // BUG FIX: the original helper closed over an undefined `callback`;
  // the waterfall callback must be passed in as an argument.
  var redis_hget = function(octets, callback){
    redis.hget("e:" + octets, 'ccid', function(e, d){
      // Forward the redis error as well, so a failure is not silently
      // treated as a miss.
      callback(e, d)
    })
  }
  if (debug) { winston.log('info', 'emergency router.get() attempting hget using :' + three_octets); }
  async.waterfall([
    function(callback){
      redis_hget(three_octets, callback)
    },
    function(d, callback){
      if(d)
        // BUG FIX: pass null in the error slot; the original called
        // callback(d), which makes async treat the data as an error.
        callback(null, d)
      else
        redis_hget(two_octets, callback)
    }
  ],function(err,result){
    if(err){
      // BUG FIX: the original logged `octets` and `e`, neither of which
      // exists in this scope; log the error we actually received.
      winston.log('error', 'hget failed with error: ' + err);
      res.status(500).send("Database query failed");
      return;
    }else{
      if(result){
        // BUG FIX: `d` is not in scope here — use `result`.
        if (debug) { winston.log('info', 'HGET query using ip: ' + ip + ' returning data: ' + result ) };
        res.status(200).send(JSON.stringify(result));
        return;
      }else{
        res.status(404).send("Unknown IP");
        return;
      }
    }
  })
})
I've the following code:
// Runs the scene-lookup query and returns a promise that resolves (with
// the match count) once the first matching row has been copied into the
// module-level variables (cena_id, cena_numero, ...), or rejects on any
// connection/query error. NOTE(review): `userId`, `sceneName` and the
// cena_* variables are globals defined elsewhere — confirm in full file.
function query1() {
  var defered = Q.defer();
  console.log("In query1");
  var connection = mysql.createConnection({
    host: '........',
    user: 'm...c....a.....i',
    password: '......Z....9...K',
    database: '.....ol'
  });
  connection.connect(function(err) {
    if (!err) {
      console.log("Database is connected ...");
    } else {
      console.log("Error connecting database ...");
      // Nothing below can succeed without a connection — fail the promise.
      defered.reject(err);
    }
  });
  // BUG FIX (security): use ? placeholders instead of concatenating
  // userId / sceneName into the SQL string, which allowed SQL injection.
  sql = '' +
    'select c.ID as CENA_ID, ' +
    '       c.I_KEY as CENA_NUMERO, ' +
    '       c.NM_CENA as CENA_NOME, ' +
    '       b.DS_MAC as MAC_BOX, ' +
    '       v.DS_CLIENTID as ALEXA_ID, ' +
    '       v.FK_ID_GRUPO as GRUPO_ID ' +
    '  from TB_DISPOSITIVOS_VOZ v ' +
    ' inner join TB_GRUPOS g ' +
    '    on g.ID = v.FK_ID_GRUPO ' +
    ' inner join TB_CENAS c ' +
    '    on g.ID = c.FK_ID_GRUPO ' +
    ' inner join TB_CENTRAIS b ' +
    '    on g.ID = b.FK_ID_GRUPO ' +
    ' where v.DS_CLIENTID = ? ' +
    '   and lower(c.NM_CENA) like ?';
  console.log("Created query");
  try{
    connection.query(sql, [userId, '%' + sceneName.toLowerCase() + '%'], function(erro, rows, fields) {
      if (!erro) {
        console.log("Executed query verifying the userId");
        contador = 0;
        if (rows.length > 0) {
          cena_id = rows[0].CENA_ID;
          cena_numero = rows[0].CENA_NUMERO;
          cena_nome = rows[0].CENA_NOME;
          alexa_id = rows[0].ALEXA_ID;
          grupo_id = rows[0].GRUPO_ID;
          mac_box = rows[0].MAC_BOX;
          contador = contador + 1;
        }
        console.log("contador: " + contador);
        // BUG FIX: resolve the deferred so Q.all(...) actually waits
        // for this query to finish before running its .then handler.
        defered.resolve(contador);
      } else {
        console.log("Error - getting the Alexa register in database" + erro);
        context.fail("Error - getting the Alexa register in database" + erro);
        defered.reject(erro);
      }
      // Release the connection once the query has settled.
      connection.end();
    });
  }catch (ex){
    console.log("exception: " + ex);
    defered.reject(ex);
  }
  // BUG FIX: the original never returned the promise, so callers had
  // nothing to wait on and Q.all resolved immediately.
  return defered.promise;
}
And this code as well:
// Waits for query1() and then publishes the scene command over MQTT.
// NOTE(review): this only works if query1() returns a promise; as posted
// above it returns undefined, so this .then handler runs immediately —
// which is exactly the problem described below this code.
// NOTE(review): `contador`, `cena_numero`, `mac_box`, `sceneName`,
// `speechOutput` and `repromptText` are globals set elsewhere — verify.
Q.all([query1()]).then(function(results) {
console.log("Q.all log function");
if (contador > 0) {
console.log("contador > 0");
var client = mqtt.connect('mqtt://.............com');
console.log("connected to MQTT broker");
var buffer = [26,
0,0,0,0,555,645,0,0,0,0,0,
0,5555,2,Math.floor((Math.random() * 200) + 1),
0,0,0,333,13,4,0,1,0,
cena_numero
];
console.log("Created buffer");
client.on('connect', function() {
// Topic is derived from the box MAC loaded by query1().
client.publish('n/c/' + mac_box + '/app', buffer);
console.log("sent MQTT");
});
speechOutput = "Command " + sceneName + " executed successfully";
repromptText = "";
console.log("Process executed successfully")
} else {
console.log("contador <= 0");
speechOutput = "This command was not found!";
repromptText = "";
}
}, function (reason) {
// Rejection handler: logged only; no speech output is set here.
console.log("reason: " + reason);
});
How can I make the second piece of code execute only if the first query1() executed correctly? query1() runs a MySQL query, and I can only continue the process after the result of that query is available.
Anyone can help me?
Thanks a lot!
You're missing some key concepts regarding callbacks and asynchronous behavior in Node.js. You're using the "Q" library (btw I'd recommend trying bluebird instead) to handle promises, but your "query1" function does not return a promise. That's why query1 executes but your "Q.all log function" will execute before query1 is finished.
You can structure your code like this instead (I'll give an example with bluebird since I'm more familiar with it):
var Promise = require('bluebird');
var _connection;

// Wraps the first query in a real promise so callers can sequence on it.
function query1() {
  // BUG FIX: new Promise takes a single executor FUNCTION; the original
  // `new Promise(resolve, reject)` never defined resolve/reject.
  return new Promise(function (resolve, reject) {
    //open your connection
    connection.open(function (err, connection) {
      if (err) return reject(err);
      _connection = connection;
      //do your query
      _connection.query(sql, [params], function (err, data) {
        if (err) return reject(err);
        else resolve(data);
      });
    });
  });
}

// Second query, fed with the data resolved by query1.
function query2(data) {
  return new Promise(function (resolve, reject) {
    //do your query, using data passed in from query1
    _connection.query(sql, [params], function (err, data) {
      if (err) return reject(err);
      else resolve(data);
    });
  });
}

// BUG FIX: call query1() — the original chained .then on the function
// object itself — and return query2's promise from the handler so the
// .catch below also covers failures in query2.
query1()
  .then(function (data) { return query2(data); })
  .catch(function (err) {
    console.log('error:', err);
  });
Also, just FYI, concatenating SQL string like this is a no-no that will open you up to a SQL injection attack:
like "%' + sceneName.toLowerCase() + '%"
Instead, use like "%?%" and call your SQL with connection.query(sql, [sceneName], function(err, data) {}). Hope this helps.
I solved my problem with the async package like this:
var async = require('async');
// Run the two actions strictly in order; the final handler fires once
// both are done (or as soon as one passes an error to its callback).
async.series([
function(callback) {
//action 1...
},
function(callback){
//action 2...
}
], function(err) {
if (err) {
speechOutput = "Scene not found!";
repromptText = "Please try again.";
}
console.log("Before speechOutput");
// NOTE(review): `callback` is not defined in this final handler's scope —
// presumably it is the enclosing Alexa handler's callback captured from
// an outer scope not shown here; verify, otherwise this line throws a
// ReferenceError.
callback(sessionAttributes,
buildSpeechletResponse(cardTitle, speechOutput, repromptText, shouldEndSession));
});
Problem:
I need to upload hundreds of PDF documents, convert them to HTML and then store the HTML in MongoDB. I am currently saving both the incoming PDF documents and converted HTML in the file system. Is there a way to use streams to avoid all the file I/O?
Current approach (which works but is slow):
I am using:
Busboy to read the uploaded PDF documents which I save to the file system.
I create an "exec" child process in node.js which invokes "'pdftohtml -c -s -noframes -nodrm ' + inputFileNamePDF + ' ' + outputFileNameHTML,". The HTML output files get saved to the file system.
I then iterate through all the HTML files to create a Bulk upsert to MongoDB.
Ideally I'd like to stream the uploaded PDF file directly to "inputFileNamePDF". Then stream the converted "outputFileNameHTML" to the bulk upsert.
Here's the Code:
var path = require("path"),
Busboy = require('busboy')
http = require('http'),
util = require('util'),
fs = require('fs-extra'),
pdftohtml = require('pdftohtmljs'),
exec =require('child_process').exec,
pdf_extract = require('pdf-extract'),
// Express handler: receives multipart PDF uploads via Busboy, saves each
// file to a per-user /tmp directory, and — once every part is on disk and
// the form is fully parsed — kicks off pdftohtml conversion of each PDF.
// NOTE(review): snippet is truncated — postUpload and convertToHTMLTxt
// are never closed here, and the line above ends the require list with a
// trailing comma; check the full file.
exports.postUpload = function (req, res) {
// parse a file upload
var fileName = "";
var uploadDir = '/tmp/' + res.locals.user._doc.email.replace(/[#\.]/g,"_");
// infiles/outfiles track parts seen vs parts fully written; `done` flips
// when Busboy has parsed the whole form. All three together form the
// exit condition below.
var infiles = 0, outfiles = 0, done = false,
busboy = new Busboy({ headers: req.headers });
console.log('Start parsing form ...');
busboy.on('file', function (fieldname, file, filename) {
++infiles;
console.log("file event #" + infiles);
onFile(fieldname, file, filename, function () {
++outfiles;
console.log("file #" + infiles + " written.");
if (done) console.log(outfiles + '/' + infiles + ' parts written to disk');
if (done && infiles === outfiles) {
// ACTUAL EXIT CONDITION
console.log('All parts written to disk');
res.writeHead(200, { 'Connection': 'close' });
res.end("That's all folks!");
// Conversion happens after the response; the client is not told
// whether conversion succeeded.
convertToHTMLTxt();
}
});
});
busboy.on('finish', function () {
console.log('Done parsing form!');
done = true;
});
req.pipe(busboy);
// Streams one uploaded part to disk under a sanitized name, then calls
// next() once the write stream has closed.
function onFile(fieldname, file, filename, next) {
// or save at some other location
var fileName = "";
// Replace anything that is not alphanumeric/underscore/hyphen, then
// restore the extension dot (e.g. "report_pdf" -> "report.pdf").
fileName = filename.replace( /[^a-z0-9_\-]/gi,"_");
fileName = fileName.replace(/_(pdf|docx|doc)$/i,".$1");
var fstream = fs.createWriteStream(path.join(uploadDir, fileName));
file.on('end', function () {
console.log(fieldname + '(' + fileName + ') EOF');
});
fstream.on('close', function () {
console.log(fieldname + '(' + fileName + ') written to disk');
next();
});
console.log(fieldname + '(' + fileName + ') start saving');
file.pipe(fstream);
}
// Scans the upload directory and spawns one pdftohtml child process per
// PDF found. NOTE(review): conversions run concurrently (exec inside
// forEach) and errors are only logged, not reported to the uploader.
function convertToHTMLTxt () {
var execTxt, execHTML, execPDF;
var textDir = 'text';
var htmlDir = 'html';
console.log('Directory: ', uploadDir);
fs.readdir(uploadDir, function(err, files) {
if (err) {
console.log('error reading directory: ', uploadDir);
return;
}
files.forEach(function(fileName) {
var fileNameHTML = path.join(uploadDir, htmlDir,
fileName.replace(/(pdf|docx|doc)$/i,"html"));
var fileNamePDF = path.join(uploadDir, fileName);
if (fileName.match(/pdf$/i)) {
execPDF = exec('pdftohtml -c -s -noframes -nodrm '
+ fileNamePDF + ' ' + fileNameHTML,
function(error, stdout, stderr) {
console.log('stdout: ', stdout);
console.log('stderr: ', stderr);
if (error !== null) {
console.log('exec error: ', error);
}
});
execPDF.on('close', function (code) {
console.log('******** PDF to HTML Conversion complete - exit code '
+ code);
});
}
})
});
Once the conversion is done I iterate through all the HTML files and do a MongoDB bulk upsert:
// Read one converted HTML file and queue a MongoDB bulk upsert keyed on
// (userName, docName). NOTE(review): snippet is truncated — the enclosing
// loop/callback and the eventual bulk.execute() are not shown.
fs.readFile(fileNameHTML, 'utf8', function (err, HTMLData) {
if (err) {
// NOTE(review): '/nerror' in the log string is likely a typo for
// '\nerror' — confirm before changing, it is runtime output.
console.log('error reading file: ', fileNameHTML + '/nerror: ' + err);
callback(err);
return;
}
bulk.find({ userName: userName,
docName : fileName
}).upsert()
.updateOne({userName: userName,
docName : fileName,
HTMLData : HTMLData});
I'm working on a web app that reads and writes files. This is in my index.js route file. When I load the results page, it says that it cannot load the coverage file. I understand that this is because it hasn't finished writing before the requests page loads. My question is how can I load the file and update the page when it is done loading?
// GET /results?id=... — render the results page for a submitted job.
// BUG FIX: the original used readFileSync, which (a) blocks the event
// loop for every concurrent request and (b) throws an unhandled error
// when the coverage file has not been written yet — the exact symptom
// described above. Read asynchronously and report missing files.
router.get('/results?', function(req, res) {
  var id = req.query.id;
  fs.readFile(temppath + id + ".sequence", "utf8", function (err, sequence) {
    if (err) { res.status(500).send("Could not read sequence file"); return; }
    fs.readFile(temppath + id + ".refseq", "utf8", function (err, refseq) {
      if (err) { res.status(500).send("Could not read refseq file"); return; }
      fs.readFile(temppath + id + ".coverage.txt", "utf8", function (err, coverage) {
        // The coverage file is written by the background script and may
        // not exist yet when the client follows the redirect.
        if (err) { res.status(500).send("Could not read coverage file"); return; }
        res.render('results', { title: 'Results', sequence: sequence, refseq: refseq, coverage: coverage});
      });
    });
  });
});
// POST /calculate-coverage — store the submitted sequences, launch the
// coverage script in the background, and redirect to the results page.
router.post('/calculate-coverage', function(req, res) {
  var id = crypto.randomBytes(20).toString('hex');
  // Build ">header\nsequence" records with whitespace stripped.
  // BUG FIX: the original redeclared `sequence` and `refseq` with a
  // second `var` on consecutive lines.
  var sequence = ">" + temppath + id + "\n" + req.body.sequence.replace(/ /g,'');
  var refseq = ">" + temppath + id + "\n" + req.body.refseq.replace(/ /g,'');
  var sequenceFile = temppath + id + ".sequence";
  var refseqFile = temppath + id + ".refseq";
  // Asynchronous writes — writeFileSync would block the event loop for
  // every concurrent request.
  fs.writeFile(sequenceFile, sequence, function (err) {
    if (err) { res.status(500).send("Could not store sequence"); return; }
    fs.writeFile(refseqFile, refseq, function (err) {
      if (err) { res.status(500).send("Could not store refseq"); return; }
      //bamtools coverage script — runs in the background; the results
      //page must tolerate the coverage file appearing later.
      var cmd = 'bash ./scripts/coverage.sh ' + sequenceFile + " " + refseqFile + " " + temppath + id;
      console.log(cmd);
      exec(cmd, function(error, stdout, stderr) {
        console.log('stdout: ' + stdout);
        console.log('stderr: ' + stderr);
        if (error !== null) {
          console.log('exec error: ' + error);
        }
      });
      // res.redirect already sets the Location header; the original's
      // extra res.location() call was redundant.
      res.redirect("results?id="+id);
    });
  });
});
Never use synchronous functions like this: if you have 100 concurrent requests and a sync function is used while handling one of them, the other 99 clients will wait until that function ends. Instead, use the async analogs:
// Read the three result files asynchronously and render once the last
// one is available.
// BUG FIX: the original ignored all three `err` arguments, so a missing
// file silently rendered `undefined` content; check each one.
fs.readFile(temppath + id + ".sequence", "utf8", function(err, sequence) {
  if (err) { res.status(500).send("Could not read sequence file"); return; }
  fs.readFile(temppath + id + ".refseq", "utf8", function(err, refseq) {
    if (err) { res.status(500).send("Could not read refseq file"); return; }
    fs.readFile(temppath + id + ".coverage.txt", "utf8", function(err, coverage) {
      if (err) { res.status(500).send("Could not read coverage file"); return; }
      res.render('results', { title: 'Results', sequence: sequence, refseq: refseq, coverage: coverage});
    });
  });
});
// Questioner's non-working attempt to capture ffprobe output.
var fp = 'ffprobe ' + fileName + ' -show_streams | grep '
var width = exec(fp+'width', function(err, stdout, stderr){
// This return value goes to exec's callback machinery and is discarded;
// exec itself returns a ChildProcess handle, not the command output.
return stdout;
});
// NOTE(review): `stdout` is not in scope on this line, and (as the answer
// below explains) this line runs before the async exec callback fires,
// so the output cannot be available here.
alert(stdout + 'random example');
how do I get the stdout 'out' of the process so that I can use it later.
Node's exec function is asynchronous. This means that there is no guarantee that code below the exec call will wait until the child process finishes to run. To execute code once the process quits, then, you must provide a callback which deals with the results. Your code can branch off from there:
// Continue the program from inside the exec callback — the only place
// where the child process output is guaranteed to exist.
var fp = 'ffprobe ' + fileName + ' -show_streams | grep ';

// your program continues here
function afterFFProbe(output) {
}

var width = exec(fp + 'width', function (err, stdout, stderr) {
  console.log(stdout);
  // ... process stdout a bit ...
  afterFFProbe(stdout);
});
None of the answers above worked for me. This did though.
// Probe an RTSP stream and print whatever ffprobe emits on either pipe.
var probeCommand = 'rtsp://xx.xx.xx.xx/axis-media/media.3gp';
var commandLine = 'ffprobe ' + probeCommand + ' | echo ';
exec(commandLine, function (err, stdout, stderr) {
  console.log(stdout + stderr);
});
If I'm understanding you correctly:
// NOTE(review): this suggestion does NOT work as intended — exec is
// asynchronous (see the explanation above), so the alert below executes
// before the callback assigns `value`; `value` is still undefined there.
var fp = 'ffprobe ' + fileName + ' -show_streams | grep ',
value,
width = exec(fp+'width', function(err, stdout, stderr) {
value = stdout;
// Discarded by exec's callback machinery.
return stdout;
});
alert(value + 'random example');
I think this might work:
// NOTE(review): this suggestion does not work either: inside the exec
// callback `this` is not the enclosing scope, so `this.output = stdout`
// never updates the outer `output` variable — and the alert below runs
// before the async callback fires in any case.
var output = "";
var fp = 'ffprobe ' + fileName + ' -show_streams | grep '
var width = exec(fp+'width', function(err, stdout, stderr){
this.output = stdout;
});
alert(output + 'random example');