Having difficulty with javascript(node.js) that needs to be synchronous - javascript

I have an express.js app that needs to run a script on the server in order to derive some values using functions later. Here's the gist of it:
shell.exec(commandString);
readFolder();
renderPage();
Essentially, I need to run a script on the server, then run the second function, then run the third function. These need to happen subsequently, but it seems that javascript moves on ahead with the the second and third function no matter what I do. I've tried promises, async, callbacks. All of which I only partially understand and seem to get zero progress.
I will admit that I am a javascript novice. I am working on a project with others and this task fell to me. I doubt this is the best way to accomplish our ultimate goals, but I am left with little choice. please help.
I'll put the entire post here for reference:
//Run script when post is rec'd from root and send to results page
app.post("/", (req, res) => {
var commandString;
//take values and create complete command for Astrum script
commandString = 'bash /home/astrum/Main/Astrum.sh -s ' + req.body.speed + ' -h ' + req.body.host + ' -u ' + req.body.username + ' -p ' + req.body.password;
//execute command in shell
shell.exec(commandString);
readFolder();
renderPage();
//Iterate thru filenames to create arrays for links and link labels
function readFolder() {
fs.readdir('./reports/html/', (err, files) => {
//variable & method for links to html records pages
ipAddressesLink = files; //this is initialized earlier, globally
//variable and method to remove file extension for link labels in pug
ipAddresses = files.map(removeExtension); //this is initialized earlier, globally
});
}
//function to remove last five characters of each element
function removeExtension(value) {
return value.substring(0, value.length - 5);
};
//function to render the page
function renderPage() {
res.render("results", {ipAddressesLink, ipAddresses, title: 'Results'});
}
res.end();
});

You could write it this way:
shell.exec(commandString, (error, stdout, stderr) => {
// Calling the 1st function after shell command is executed
readFolder();
});
function readFolder() {
fs.readdir('./reports/html/', (err, files) => {
// Some stuff
...
// Calls the 2nd function after fs is done reading files
renderPage();
});
}
function renderPage() {
const options = { ... }; // IP addresses etc.
res.render(
"results",
options,
// Calls the final function after render is finished
sendResponse
);
}
function sendResponse(err, html) {
// Sends the response. It’s possible that res.send() is the better solution here
res.end();
}
It’s just the general structure of the callback chain, definitely not the cleanest one. If you want better code structure and readability try switching to async / await syntax.

Is shell here the child_process module? If it is then you can pass an optional callback argument and call your functions from there.
shell.exec(commandString, (error, stdout, stderr) => {
const files = readFolder();
renderPage(files);
});
function readFolder() {
...
return fs.readdirSync(files);
}
function renderPage(files) {
...
}

Related

sails.js node.js Parse JSON on controller

In my controller called MapController I'm doing a function to do a parse of remote json files, and from an if-else structure add some values in an array called "parsewebservice", apparently everything is working fine but console.log ( parsewebservice); is not returning the values that were passed to the array "parsewebservice" in the place where it is returning it empty. But when I put it inside the forEach it returns, but everything cluttered and repeated then is not the right way.
I wanted to know why the values that were passed to the array "parsewebservice" are not going along with the variable after populada and what would be the correct way to do it?
Here is my code below:
/**
* MapController
*
* #description :: Server-side logic for managing Maps
* #help :: See http://sailsjs.org/#!/documentation/concepts/Controllers
*/
module.exports = {
index: function(req, res, next) {
Data.find(function foundData(err, datas) {
if (err) return next(err);
var parsewebservice = [];
datas.forEach(function(data, index) {
var req = require("request");
var url = data.address + "?f=pjson";
req(url, function(err, res, retorno) {
if (err) {
console.log(err);
} else {
var camadas = JSON.parse(retorno);
if (camadas.mapName) {
camadas.layers.forEach(function(campo, i) {
if (campo.subLayerIds != null) {
} else if (campo.subLayerIds == null) {
parsewebservice.push([i, "dynamicMapLayer", campo.name, data.address]);
}
});
} else if (camadas.serviceDataType) {
parsewebservice.push([null, "imageMapLayer", camadas.name, data.address]);
} else if (camadas.type) {
parsewebservice.push([null, "featureLayer", camadas.name, data.address]);
}
}
});
});
console.log(parsewebservice);
});
},
};
My first comment has to be that you should not combine function(req, res) with var req = require('request')... you lose your access to the original req object!
So, you need to run a list of async tasks, and do something when they are all complete. That will never be entirely easy, and no matter what, you will have to get used to the idea that your code does not run from top to bottom as you've written it. Your console.log at the bottom runs before any of the callbacks (functions you pass in) you pass to your external requests.
The right way to do this is to use promises. It looks like you are using this request library, whose returned requests can only accept callbacks, not be returned as promises. You can create your own promise wrapper for them, or use an alternative library (several are recommended on the page).
I don't want to write a whole intro-to-promises right here, so what I will do is give you a less pretty, but maybe more understandable way to run some code at the completion of all your requests.
Data.find(function foundData(err, datas) {
if (err) return next(err);
var parsewebservice = [];
// here we will write some code that we will run once per returned data
var processResponse = function(resp) {
parsewebservice.push(resp);
if(parsewebservice.length >= datas.length) {
// we are done, that was the final request
console.log(parsewebservice);
return res.send({data: parsewebservice)}); // or whatever
}
};
datas.forEach(function(data, index) {
var request = require("request");
var url = data.address + "?f=pjson";
request(url, function(err, res, retorno) {
// do some processing of retorno...
// call our function to handle the result
processResponse(retorno);
});
});
console.log(parsewebservice); // still an empty array here
});
I solved the problem.
the "request" module is asynchronous so we need to wait for it to respond and then send the response to the view.
To do this we created a function called "foo" to contain the foreach and the request, we made a callback of that function and finally we made the response (res.view) within that function, so that the controller response would only be sent after the response of the "foo" function to the callback. So we were able to parse.json the data from the "data" collection using foreach and the "request" module and send the objects to the view.
Many thanks to all who have helped me, my sincere thanks.

handling a null get in res.json

I got a nodejs app built like this:
app.get('/customer/:ent_cust_id', function (req, res, next) {
var query = 'Select * from entcustinfo where ent_cust_id = ' + req.params.ent_cust_id;
console.log('Select * from entcustinfo where ent_cust_id =' + req.params.ent_cust_id);
client.execute(query, function (err, result) {
if (err) return next (err);
var row = result.rows[0];
//Response
res.json({ent_cust_id: req.params.ent_cust_id, name: row.get('offers')});
});
});
but it causes node to shut down like this when the results of the array come back empty...
[root#ip-10-205-116-141 cassapi]# /usr/local/bin/node app.js
Example app listening at http://0.0.0.0:3030
Select * from entcustinfo where ent_cust_id =1106667844
events.js:87
throw er; // Unhandled 'error' event
^
TypeError: Cannot read property 'get' of undefined
at /home/ec2-user/cassapi/app.js:16:14
at readCallback (/home/ec2-user/cassapi/node_modules/cassandra-driver/lib/request-handler.js:195:5)
at Connection.invokeCallback (/home/ec2-user/cassapi/node_modules/cassandra-driver/lib/connection.js:567:5)
at Connection.handleResult (/home/ec2-user/cassapi/node_modules/cassandra-driver/lib/connection.js:507:8)
at ResultEmitter.emit (events.js:120:17)
at ResultEmitter.each (/home/ec2-user/cassapi/node_modules/cassandra-driver/lib/streams.js:437:17)
at ResultEmitter._write (/home/ec2-user/cassapi/node_modules/cassandra-driver/lib/streams.js:421:10)
at doWrite (_stream_writable.js:303:12)
at writeOrBuffer (_stream_writable.js:290:5)
at ResultEmitter.Writable.write (_stream_writable.js:219:11)
I've tried to adjust app.get like this to check to see if the array is empty:
app.get('/customer/:ent_cust_id', function (req, res, next) {
var query = 'Select * from entcustinfo where ent_cust_id = ' + req.params.ent_cust_id;
console.log('Select * from entcustinfo where ent_cust_id =' + req.params.ent_cust_id);
client.execute(query, function (err, result) {
if (err) return next (err);
var row = result.rows[0];
if (row.get('ent_cust_id') = '') {
res.send('ent_cust_id: ' + req.params.ent_cust_id + ' not found. Not all data is loaded.');
} else {
var row = result.rows[0];
//Response
res.json({ent_cust_id: req.params.ent_cust_id, accts: row.get('accts'), offers: row.get('offers')});
}
});
});
I'm thinking I need an if statement to check if no records were returned in results.rows and then do an res.send. I tried that but the same behavior still occures. How do I find out if no records were returned?
EDIT:
Made code changes in the first part...pasted in the wrong code...clarified question too.
It depends on the exact behavior of your client.execute(sql, callback(err, result)) function. You just have to test for what it returns on no results.
Ordinarily a callback from a database query will pass an empty array as the result if there are no results. So if you handle the case where result.length==0, you will no longer be trying to reference result[i].row.get on rows that no longer exist.
Although it is not the cause of this particular problem, you also want to escape your SQL queries. There is really no downside and it significantly increases your application security.
Let's attack the problem. First, your error:
TypeError: Cannot read property 'get' of undefined
This is node telling you that you wrote <object>.get, but when it tried to access the get property, it found that <object> was undefined.
Even more so, it's telling you where it happened:
/home/ec2-user/cassapi/app.js:16:14
Go to that file, and take a look at line 16. It's probably one of the 2 you posted:
app.get('/customer/:ent_cust_id', ...)
// or
row.get('offers')
So, you think app or row are objects with a get() method, but one of them must be undefined. This can happen for 2 reasons:
The app has a value of undefined-- was it assigned? Is it coming from another function? Did that function forget to return?
The row variable, assigned to result.rows[0], is undefined. Does result.rows have a [0] element? Could the array be empty?
Check the part where you create the app object, and the results you obtain .row[0] from. Use console.log to see their values. The error should be easy to spot.
//in your require, consider using below:
//var body-parser = require('body-parser'); // after installing with npm. I recommend globally installing
//app.use(body-parser()); //make sure you add this to initialize body parser after installing it. *NOTE: If using node with express 4.~ this is deprecated so ignore.
app.get('/customer/:ent_cust_id', function (req, res, next) {
//Your code: var query = 'Select * from entcustinfo where ent_cust_id = ?' [req.params.ent_cust_id];
/* Above, should probably include a plus sign between "?'" and "[req.params.....]; as below: */
var query = 'Select * from entcustinfo where ent_cust_id = ?' + [req.params.ent_cust_id];
//Another typo detailed below
//Your code: consolelog('Select * from entcustinfo where ent_cust_id = ?');
console.log('Select * from entcustinfo where ent_cust_id = ?');
/* Also, at this point you want to be logging your full query var, not your manually entered string value so type this as below: */
console.log(query); // shows your query, not your manually typed string.
//There really shouldn't ever be an issue with your manually typed query unless you need it surrounded in quotes.
//For greater clarification, whatever you're calling client below should be defined and expanded upon with an example.
/* In this code snippet, I typically always send the response first, if there is NOT an err liks so: if(!err){ //do the client's stuff first, else (if there's an error, does not need to be defined) { //do whatever else } Handling errors after it adds a small bit of speed to your process and reduces a step if you get a valid response and leaves error responding to secondary processing under the expectation you won't encounter an error */
client.execute(query, function (err, result) {
if(!err) { //do your client stuff first if no error then return next.
} else { console.log(err); return next} ; //No conditional needed for the error, because you've gotten here due to your code not working.
//if (err) return next (err);
var row = result.rows[0];
//Response
res.json({ent_cust_id: req.params.ent_cust_id, name: row.get('offers')}); /* Here you're responding with a JSON object as if you were querying a large JSON file. You may want to consider using res.send with and application/json response type. */
});
});
<div> For additional explanation on routing, you may want to look at express documentation here: express res.json</div>
<p> Also, make sure you're requiring body-parser in node so you can interpret incoming routes! </p>
<code> npm install -g body-parser </code>

Get file name from stats object

I use the fs.stat for getting information about file, now I want to get the file name from the stats object, I did some searching and I didnt find anything help.
I will explain my code now, maybe someone will find another solution, this is all my code:
function index(response, lang) {
temp.loadTpl('templates/' + lang + '/content.html', function (content) {
fs.readdir('files/', function (err, files) {
temp.loadTpl('templates/' + lang + '/files.html', function (data) {
if (err) console.log(err);
var i, fnames, len = files.length, filesNamesHTML = '';
fnames = files;
for (i = 0; i < len; i++) {
fs.stat('files/' + files[i] , function (err, stats) {
console.log(i);
if (stats.isFile()) temp.write('TYPE', 'file-o');
else temp.write('TYPE', 'folder');
temp.write('FNAME', fnames[i]);
filesNamesHTML += temp.transTpl(data);
});
}
setTimeout(function () {
temp.write('FILES', filesNamesHTML);
response.write(temp.transTpl(content));
}, 100);
});
});
});
}
The relevant part is:
I use fs.readdir to read directiony files, and I run over the result with loop,
And for every file I run stat function like that:
for (i = 0; i < len; i++) {
fs.stat('files/' + files[i] , function (err, stats) {
temp.write('FNAME', fnames[i]);
});
}
The problem is in this: fnames[i], until that the callback invoked, the i variable is changed, and I always get the same result for every file.
Maybe you have a solution how the get the file name inside the stat callback function.
The problem happend because I try to use the Async functions...
EDIT:
I thought on solution, who wants...
You can put the fs.stat function into a function and to pass this function the file name, and use it inside the stat callback function.
Your EDIT should solve the problem of variable scoping. You haven't explained it very well, but I know what you mean.
You've got a for loop, which is synchronous, but you're using it to start asynchronous functions. Then you're waiting until all files have been fs.stated before doing the final part, by assuming that fs.stat won't take more than 100ms. As asynchronous things go, fs.stat probably isn't too slow, so 100ms should be enough time unless you have a large number of files. However, when you're doing things that are slower and/or take a variable length of time, you'll want to control the timing more tightly so that it does the last bit immediately after it's finished the last of the async bits.
There are npm modules for this kind of management of asynchronous functions. I'm going to answer using the async module, because that's the one I know, but there are equally valid ways of doing it with other modules using promises.
I'd do it like this:
var async = require('async');
function index(response, lang) {
temp.loadTpl('templates/' + lang + '/content.html', function (content) {
fs.readdir('files/', function (err, files) {
temp.loadTpl('templates/' + lang + '/files.html', function (data) {
if (err) console.log(err);
var i, fnames, len = files.length, filesNamesHTML = '';
fnames = files;
async.each(files, function(file, done) {
fs.stat('files/' + file , function (err, stats) {
console.log(i);
if (stats.isFile()) temp.write('TYPE', 'file-o');
else temp.write('TYPE', 'folder');
temp.write('FNAME', fnames[i]);
filesNamesHTML += temp.transTpl(data);
done(null);
});
}, function() {
temp.write('FILES', filesNamesHTML);
response.write(temp.transTpl(content));
});
});
});
});
}
(Then do npm install async .)
There are other things I could change, but this is tricky stuff the first time you see it so I'll refrain. Suffice it to say that it's worth exploring the async module or something equivalent if you want to write javascript code involving multiple calls to asynchronous functions.

How do I run an asynchronous 'find' in a loop while incrementing the find parameter so I can generate unique custom id's?

I'm new to mongoose/mongodb and I am trying to do some sort of error handling with my document save.
I am trying to create a stub id to store into the db for easier data retrieval later on (and also to put into the url bar so people can send links to my website to that particular page more easily -- like jsfiddle or codepen).
Basically I want to search for a document with a page_id and if it exists, I want to regenerate that page_id and search until it gets to one that's unused like this:
while(!done){
Model.findOne({'page_id': some_hex}, function (err, doc) {
if(doc){
some_hex = generate_hex();
}
else
{
done = true;
}
});
}
model.page_id = some_hex;
model.save();
However, since mongoose is asynchronous, the while loop will pretty much run indefinitely while the find works in the background until it finds something. This will kill the resources on the server.
I'm looking for an efficient way to retry save() when it fails (with a change to page_id). Or to try and find an unused page_id. I have page_id marked as unique:true in my schema.
Retrying should be performed asynchronously:
var tryToSave = function(doc, callback) {
var instance = new Model(doc);
instance.page_id = generate_hex();
instance.save(function(err) {
if (err)
if (err.code === 11000) { // 'duplicate key error'
// retry
return tryToSave(doc, callback);
} else {
// another error
return callback(err);
}
}
// it worked!
callback(null, instance);
});
};
// And somewhere else:
tryToSave(doc, function(err, instance) {
if (err) ...; // handle errors
...
});

Async in getting files in folder?

I am new in Node.js . All I did in here is get file list in folder... Everything in Node.js seem to async but my function I want it in synchronous way.
So, I do as follow.
function getFiles() {
var file = [];
var walker = walk.walk('./files');
walker.on('file', function (root, stat, next) {
file.push(root + '/' + stat.name);
next();
})
walker.on('end', function () {
console.log(JSON.stringify(file));
})
return file;}
It worked as I expected :
["./files/1.mp3","./files/2.mp3","./files/3.mp3","./files/4.mp3","./files/5.mp3","./files/6.mp3","./files/7.mp3","./files/8.mp3"]
but when I assigned that function to variable
var list = getFiles();
response.writeHead(200, {'Content-type':'application/json'});
response.end(JSON.stringify(list));
It always returned nothing, I think that getFiles() run in another thread so can not receive value of data. Thank for your reading.
I can shed some light on the behavior you are experiencing by outlining the flow of the application as it is run:
call to getFiles
declare files array and walker
bind walker event "file" and "end" to callbacks
return files array
walker file event fires
walker end event fires
as you can see the events are firing out of band with the method call. To deal with this the common node.js approach is to setup your code something like the following:
function getFiles(callback) {
var file = [];
var walker = walk.walk('./files');
walker.on('file', function (root, stat, next) {
file.push(root + '/' + stat.name);
next();
})
walker.on('end', function () {
callback(file);
})
}
now when you go to execute this method you would do something like this:
getFiles(function(list){
response.writeHead(200, {'Content-type':'application/json'});
response.end(JSON.stringify(list));
});
obviously this is a little unsightly since the controller now has to create a callback scenario and the getFiles method need to execute that callback in course. Another approach is to use the Promises concept, which I will leave to the discovery of the reader with the following link: https://github.com/kriskowal/q
Async functions return before they are ready
You can't return data thats not there in your own code
Async functions often take a callback argument that is executed when they are ready
You own code could ask for its own callbacks
`
function getFiles(callBack) {
var file = [];
var walker = walk.walk('./files');
walker.on('file', function (root, stat, next) {
file.push(root + '/' + stat.name);
next();
})
walker.on('end', function () {
console.log(JSON.stringify(file));
callBack(file);
})
}
// assuming server environment, express, connect, etc...
app.get('/list', function(req, res){
getFiles(function(D){ res.json(D) });
});
`

Categories

Resources