I'm trying to create a hook where I loop over each result from the response, find a user based on the current object, and attach it as another attribute on the final response. However, the async calls are preventing me from sending the right response:
Board.afterRemote('find', function(context, boards, next) {
  var users = [];
  context.result.admin = [];
  var User = app.models.User;
  context.result.forEach(function(result) {
    User.findOne({where: {id: result.adminId}}, function(err, user) {
      result.admin = user;
    });
  });
  console.log("result: " + JSON.stringify(context.result));
  next();
});
How can I add the user to each result in context.result?
Is User.findOne an asynchronous operation? If so, I would recommend using an async control-flow library like async to iterate over the results and perform an asynchronous action on each item. It would look something like this:
var async = require('async');

Board.afterRemote('find', function(context, boards, next) {
  var User = app.models.User;
  async.each(context.result, function(result, callback) {
    User.findOne({where: {id: result.adminId}}, function(err, user) {
      result.admin = user;
      callback(err); // done with this iteration
    });
  }, function(err) { // called once every item has been iterated over
    console.log("result: " + JSON.stringify(context.result));
    next();
  });
});
In my controller, MapController, I have a function that parses remote JSON files and, depending on an if-else structure, pushes some values into an array called "parsewebservice". Everything appears to work, except that console.log(parsewebservice); prints an empty array at the point where I call it. When I move that console.log inside the forEach it does print values, but cluttered and repeated, so that is not the right way.
I would like to know why the values pushed into "parsewebservice" are not present in the variable after it has been populated, and what the correct way to do this would be.
Here is my code below:
/**
 * MapController
 *
 * @description :: Server-side logic for managing Maps
 * @help        :: See http://sailsjs.org/#!/documentation/concepts/Controllers
 */
module.exports = {
  index: function(req, res, next) {
    Data.find(function foundData(err, datas) {
      if (err) return next(err);
      var parsewebservice = [];
      datas.forEach(function(data, index) {
        var req = require("request");
        var url = data.address + "?f=pjson";
        req(url, function(err, res, retorno) {
          if (err) {
            console.log(err);
          } else {
            var camadas = JSON.parse(retorno);
            if (camadas.mapName) {
              camadas.layers.forEach(function(campo, i) {
                if (campo.subLayerIds != null) {
                } else if (campo.subLayerIds == null) {
                  parsewebservice.push([i, "dynamicMapLayer", campo.name, data.address]);
                }
              });
            } else if (camadas.serviceDataType) {
              parsewebservice.push([null, "imageMapLayer", camadas.name, data.address]);
            } else if (camadas.type) {
              parsewebservice.push([null, "featureLayer", camadas.name, data.address]);
            }
          }
        });
      });
      console.log(parsewebservice);
    });
  },
};
My first comment has to be that you should not combine function(req, res) with var req = require('request')... you lose your access to the original req object!
So, you need to run a list of async tasks, and do something when they are all complete. That will never be entirely easy, and no matter what, you will have to get used to the idea that your code does not run from top to bottom as you've written it. Your console.log at the bottom runs before any of the callbacks (functions you pass in) you pass to your external requests.
The right way to do this is to use promises. It looks like you are using this request library, whose returned requests can only accept callbacks, not be returned as promises. You can create your own promise wrapper for them (a rough sketch appears after the code below), or use an alternative library (several are recommended on the page).
I don't want to write a whole intro-to-promises right here, so what I will do is give you a less pretty, but maybe more understandable way to run some code at the completion of all your requests.
Data.find(function foundData(err, datas) {
  if (err) return next(err);
  var parsewebservice = [];
  // here we will write some code that we will run once per returned data
  var processResponse = function(resp) {
    parsewebservice.push(resp);
    if (parsewebservice.length >= datas.length) {
      // we are done, that was the final request
      console.log(parsewebservice);
      return res.send({data: parsewebservice}); // or whatever
    }
  };
  datas.forEach(function(data, index) {
    var request = require("request");
    var url = data.address + "?f=pjson";
    request(url, function(err, res, retorno) {
      // do some processing of retorno...
      // call our function to handle the result
      processResponse(retorno);
    });
  });
  console.log(parsewebservice); // still an empty array here
});
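For reference, a minimal hand-rolled promise wrapper around request could look like the sketch below. It assumes a native Promise implementation is available, and requestAsync is just an illustrative name, not part of the code above.

var request = require("request");

// Wrap a single request() call so it resolves with the response body
function requestAsync(url) {
  return new Promise(function(resolve, reject) {
    request(url, function(err, res, body) {
      if (err) return reject(err);
      resolve(body);
    });
  });
}

// Usage sketch: one request per data item, wait for all of them
// (assumes `datas` from the Data.find callback above)
Promise.all(datas.map(function(data) {
  return requestAsync(data.address + "?f=pjson");
})).then(function(bodies) {
  // every response body is available here, in the same order as datas
});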
I solved the problem.
The "request" module is asynchronous, so we need to wait for it to respond before sending the response to the view.
To do this, we created a function called "foo" that contains the forEach and the request calls, gave it a callback, and sent the response (res.view) from inside that callback, so that the controller only responds after "foo" has finished and called back. That way we were able to parse the JSON from the "data" collection with forEach and the "request" module and send the objects to the view. A sketch of that structure follows below.
Many thanks to all who helped me.
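A rough sketch of that structure (the counter used to detect the final response is an assumption, and the parsing logic from the question is elided):

index: function(req, res, next) {
  Data.find(function foundData(err, datas) {
    if (err) return next(err);
    var request = require("request");

    // "foo" wraps the forEach + request calls and reports back through `done`
    function foo(done) {
      var parsewebservice = [];
      var pending = datas.length;
      datas.forEach(function(data) {
        request(data.address + "?f=pjson", function(err, resp, retorno) {
          if (!err) {
            // ...same if/else parsing as in the question, pushing into parsewebservice...
          }
          if (--pending === 0) done(parsewebservice); // last response has arrived
        });
      });
    }

    // the response is only sent from inside foo's callback
    foo(function(parsewebservice) {
      res.view({ parsewebservice: parsewebservice });
    });
  });
}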
I have a forEach loop in NodeJS, iterating over a series of keys, the values of which are then retrieved asynchronously from Redis. Once the loop and retrieval have completed, I want to return that data set as a response.
My problem at the moment is that because the data retrieval is asynchronous, my array isn't populated when the response is sent.
How can I use promises or callbacks with my forEach loop to make sure the response is sent WITH the data?
exports.awesomeThings = function(req, res) {
  var things = [];
  client.lrange("awesomeThings", 0, -1, function(err, awesomeThings) {
    awesomeThings.forEach(function(awesomeThing) {
      client.hgetall("awesomething:" + awesomeThing, function(err, thing) {
        things.push(thing);
      });
    });
    console.log(things);
    return res.send(JSON.stringify(things));
  });
};
I use Bluebird promises here. Note how the intent of the code is rather clear and there is no nesting.
First, let's promisify the hgetall call and the client -
var client = Promise.promisifyAll(client);
Now, let's write the code with promises: .then instead of a node callback, and aggregation with .map. What .then does is signal that an async operation is complete. .map takes an array of things and maps each of them to an async operation, just like your hgetall call.
Note how Bluebird adds (by default) an Async suffix to promisified methods.
exports.awesomeThings = function(req, res) {
  // make initial request, map the array - each element to a result
  return client.lrangeAsync("awesomeThings", 0, -1).map(function(awesomeThing) {
    return client.hgetallAsync("awesomething:" + awesomeThing);
  }).then(function(things) {          // all results ready
    console.log(things);              // log them
    res.send(JSON.stringify(things)); // send them
    return things;                    // so you can use from outside
  });
};
No lib is needed. Easy as pie, it's just an async loop. Error handling is omitted. If you need to do a parallel async loop, use a counter (a sketch follows after the code below).
exports.awesomeThings = function(req, res) {
  client.lrange("awesomeThings", 0, -1, function(err, awesomeThings) {
    var len = awesomeThings.length;
    var things = [];
    (function again(i) {
      if (i === len) {
        // end
        res.send(JSON.stringify(things));
      } else {
        client.hgetall("awesomething:" + awesomeThings[i], function(err, thing) {
          things.push(thing);
          // next item
          again(i + 1);
        });
      }
    })(0);
  });
};
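A parallel variant using such a counter might look like the following sketch (error handling is again omitted):

exports.awesomeThings = function(req, res) {
  client.lrange("awesomeThings", 0, -1, function(err, awesomeThings) {
    var remaining = awesomeThings.length;
    var things = [];
    awesomeThings.forEach(function(awesomeThing, i) {
      client.hgetall("awesomething:" + awesomeThing, function(err, thing) {
        things[i] = thing;           // keep the original order
        if (--remaining === 0) {     // the last callback has fired
          res.send(JSON.stringify(things));
        }
      });
    });
  });
};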
connection.query(listprofiles, function(error, profilesReturned) {
  console.log(profilesReturned.length);
  for (var i = 0; i < profilesReturned.length; i++) {
    console.log(profilesReturned[i].column);
    var query2 = 'SELECT IF(COUNT(*) > 0, TRUE, FALSE) AS response FROM table' +
                 ' WHERE column1 = ' + connection.escape(staticvalue) +
                 ' AND r_circleowner_id = ' + connection.escape(profilesReturned[i].column);
    console.log(i + ':' + query2);
    connection.query(query2, function(error, result) {
      console.log(result);
    });
  }
});
In the above code, I first run a query called 'listprofiles' which returns a set of responses. For every individual response I get from that query, I want to pass it on to query2. I attempted a for loop, but only the last query is fired off to MySQL; that query is generated from the last element of the profilesReturned set.
How do I ensure that separate queries, containing the separate values from profilesReturned, are fired off sequentially or asynchronously?
You should be using promises - upon success of query one, you resolve the promise and execute query two, and so on.
Example with node-promise
var Promise = require("promise").Promise;
var promise = new Promise();

asyncOperation(function() {
  promise.resolve("successful result");
});

promise.then(function(result) {
  // ... executed when the action is complete ...
}, function(error) {
  // ... executed when the promise fails ...
});
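As a rough sketch of how the two queries from the question could be chained, here is the same idea using native Promises instead of the node-promise package; queryAsync is a hypothetical wrapper around connection.query:

// Hypothetical wrapper that turns connection.query into a promise
function queryAsync(sql) {
  return new Promise(function(resolve, reject) {
    connection.query(sql, function(error, rows) {
      if (error) return reject(error);
      resolve(rows);
    });
  });
}

queryAsync(listprofiles).then(function(profilesReturned) {
  // fire one follow-up query per profile and wait for all of them
  return Promise.all(profilesReturned.map(function(profile) {
    var query2 = 'SELECT IF(COUNT(*) > 0, TRUE, FALSE) AS response FROM table' +
                 ' WHERE column1 = ' + connection.escape(staticvalue) +
                 ' AND r_circleowner_id = ' + connection.escape(profile.column);
    return queryAsync(query2);
  }));
}).then(function(results) {
  console.log(results); // one result set per profile
}).catch(function(error) {
  console.error(error);
});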
connection.query() is async. You need to use something that can control the loop such as async.each() for parallel or async.eachSeries() for series:
var async = require('async');

var results = [];
connection.query(listprofiles, function(error, profilesReturned) {
  console.log(profilesReturned.length);
  async.each(profilesReturned, function(item, callback) {
    console.log(item.column);
    var query2 = 'SELECT IF(COUNT(*) > 0, TRUE, FALSE) AS response FROM table' +
                 ' WHERE column1 = ' + connection.escape(staticvalue) +
                 ' AND r_circleowner_id = ' + connection.escape(item.column);
    connection.query(query2, function(err, result) {
      if (!err) {
        console.log(result);
        results.push(result);
      }
      callback();
    });
  }, function(err) {
    console.log("all done");
    console.log(results.length);
  });
});
I would like to 'functionalize' my queries by putting them into functions which have apt names for the task.
I want to avoid putting everything in the req, res functions (my controllers), and instead put them in 'models' of sorts, that is, another JavaScript file that will be imported and used to run the functions that execute queries and return the results on behalf of the controller.
Assuming that I have the following setup for the queries:
UserController.js
exports.userAccount = function(req, res, next) {
  var queryText = "\
    SELECT *\
    FROM users\
    WHERE id = $1\
  ";
  var queryValues = [168];
  pg.connect(secrets.DATABASE_URL, function(err, client, done) {
    client.query(queryText, queryValues, function(err, result) {
      res.render('pathToSome/page', {
        queryResult: result.rows
      });
    });
  });
};
Here, while I'm in the query, I essentially redirect and render a page with the data. That works fine. But I want to take out all that pg.connect and client.query code and move it to a separate file to be imported as a model. I've come up with the following:
UserModel.js
exports.findUser = function(id) {
  // The user to be returned from the query
  // Local scope to 'findUser' function?
  var user = {};
  var queryText = "\
    SELECT *\
    FROM users\
    WHERE id = $1\
  ";
  var queryValues = [id];
  pg.connect(secrets.DATABASE_URL, function(err, client, done) {
    client.query(queryText, queryValues, function(err, result) {
      // There is only ever 1 row returned, so get the first one in the array
      // Apparently this is local scope to 'client.query'?
      // I want this to overwrite the user variable declared at the top of the function
      user = result.rows;
      // Console output correct; I have my one user
      console.log("User data: " + JSON.stringify(user));
    });
  });
  // I expect this to be correct. User is empty, because it was not really
  // assigned in the user = result.rows call above.
  console.log("User outside of 'pg.connect': " + JSON.stringify(user));
  // I would like to return the user here, but it's empty!
  return user;
};
and I'm calling my model function as so:
var user = UserModel.findUser(req.user.id);
The query executes perfectly fine in this fashion - except that the user object is not being assigned correctly (I'm assuming a scope issue), and I can't figure it out.
The goal is to be able to call a function (like the one above) from the controller, have the model execute the query and return the result to the controller.
Am I missing something blatantly obvious here?
pg.connect is an asynchronous call. Instead of waiting for data to return from the database before proceeding with the next line, the program goes ahead before Postgres answers. So in the code above, findUser returns a variable that has not yet been populated.
In order to make it work correctly, you have to add a callback to the findUser function. (Correcting an earlier edit: the done parameter in pg.connect is called in order to release the connection back to the connection pool.) The final result should look something like this:
exports.findUser = function(id, callback) {
  var user = {};
  var queryText = "SELECT * FROM users WHERE id = $1";
  var queryValues = [id];
  pg.connect(secrets.DATABASE_URL, function(err, client, done) {
    client.query(queryText, queryValues, function(err, result) {
      user = result.rows;
      done(); // Releases the connection back to the connection pool
      callback(err, user);
    });
  });
  return user;
};
And you'd use it, not like this:
var user = myModule.findUser(id);
But like this:
myModule.findUser(id, function(err, user) {
  // do something with the user.
});
If you have several steps to perform, each of them dependent on data from a previous asynchronous call, you'll wind up with confusing, Inception-style nested callbacks. Several asynchronous libraries exist to help you with making such code more readable, but the most popular is npm's async module.
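For the multi-step case, a minimal sketch with async.waterfall could look like this; findOrdersForUser is a hypothetical follow-up step that depends on the user returned by findUser:

var async = require('async');

// Each function receives the previous step's result and a callback
async.waterfall([
  function(callback) {
    myModule.findUser(id, callback);   // -> callback(err, user)
  },
  function(user, callback) {
    findOrdersForUser(user, callback); // hypothetical second step -> callback(err, orders)
  }
], function(err, orders) {
  if (err) return next(err);
  res.render('pathToSome/page', { orders: orders });
});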