Access API endpoints in MEANjs from server controller - javascript

So I have this problem: I am working on a 'following' feature in my application. What's important is that I have two models:
Follows and Notifications
When I hit the follow button in the front-end, I run a function from follow.client.controller.js which POSTs to the API endpoint /api/follows. That corresponds to follow.server.controller.js, where the update action on the Follows model is performed - easy. AFAIK that's how it works (and it works for me).
But from follows.server.controller.js I also want to invoke a POST to the API endpoint at /api/notifications, which corresponds to notifications.server.controller.js, and I can't find a proper way to do that. Any help will be appreciated.
I don't want another call from the front-end to add the notification, because it should be automatic: if a user starts following someone, the information is saved in both models at once.

You can add middleware in your server route.
app.route('/api/follows')
  .post(notification.firstFunction, follows.secondFunction);
Now add two methods in your controllers. The first makes the call to the database and adds some of the result's data to the request object, which is then forwarded to the second method.
exports.firstFunction = function(req, res, next) {
  Notification.doSomething({
    // ...
  }).exec(function(err, result) {
    if (err) return next(err);
    req.yourValueToPassForward = result;
    next(); // <-- important
  });
};

exports.secondFunction = function(req, res) {
  // ...
};
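In the asker's setup, the second handler is where the Follows update itself would happen. A minimal sketch, assuming a Mongoose Follow model and an authenticated req.user (neither is shown in the question, so the field names are assumptions to adapt to your schema):
exports.secondFunction = function(req, res) {
  // hypothetical Follow model; field names are assumptions, not from the question
  var follow = new Follow({
    follower: req.user._id,                        // assumes auth middleware populated req.user
    following: req.body.userId,
    notification: req.yourValueToPassForward._id   // value attached by firstFunction
  });

  follow.save(function(err) {
    if (err) return res.status(400).send(err);
    res.json(follow);
  });
};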
Or you can make a few database calls in one API method, chaining the calls with promises. Example:
var promise = Meetups.find({ tags: 'javascript' }).select('_id').exec();
promise.then(function (meetups) {
  var ids = meetups.map(function (m) {
    return m._id;
  });
  return People.find({ meetups: { $in: ids } }).exec();
}).then(function (people) {
  if (people.length < 10000) {
    throw new Error('Too few people!!!');
  } else {
    throw new Error('Still need more people!!!');
  }
}).then(null, function (err) {
  assert.ok(err instanceof Error);
});

Related

How to break logic into a controller and a model in Node

I do not quite understand how to properly split logic between controllers and models in Node.js when working on the back end of an application. Suppose I have this example:
This code lives in the model of my application. Logically I understand that the model should only be responsible for fetching from the database, and everything else should be done by the controller, but I don't quite understand how to do this. I tried to move part of the code into the controller and export it, but I did not succeed. Please help, at least with this example! The main thing for me is to understand the principle of working with MVC in Node.
exports.currentPostPage = function(req, res) {
  db.query('SELECT * FROM `posts`', function (err, result) {
    if (err) {
      console.log(err);
    }
    var post = result.filter(item => { return (item.id == req.params.id) ? item : false })[0];
    if (post === undefined) {
      res.render('pages/404');
    } else {
      res.render('pages/post-page', { postId: req.params.id, item: post });
    }
  });
};
So, you're on the right track. There are a lot of different ways to do it depending on preference, but one pattern I've seen pretty commonly is to use the callback as the integration point. For example, let's say you have your model file:
exports.getPostById = (id, cb) => {
  db.query('SELECT * FROM `posts` WHERE id=?', [id], function (err, result) {
    if (err) {
      return cb(err); // or, alternatively, wrap this error in a custom error
    }
    // here, your logic is just returning whatever was returned
    return cb(null, result);
  });
};
Note I'm also letting the DB handle the ID lookup, as it's probably more efficient at doing so for larger data sets. You didn't say which DB module you're using, but all the good ones have some way of doing parameterized queries, so use whatever works with your DB driver.
Anyway, the model file therefore handles just the data interaction; the controller then handles the web interaction:
// postController.js
const model = require('../models/postModel.js'); // or whatever you named it

exports.populatePost = (req, res, next, id) => {
  model.getPostById(id, (err, post) => {
    if (err) return next(err); // centralized error handler
    req.post = post;
    next();
  });
};

exports.getOnePost = (req, res, next) => {
  if (req.post) {
    return res.render('pages/post-page', req.post);
  }
  // again, central error handling
  return next({ status: 404, message: 'Post not found' });
};
I have mentioned central error handling; I vastly prefer it to scattering error handling logic all over the place. So I either make custom errors to represent things, or just do like above where I attach the status and message to an anonymous object. Either will work for our purposes. Then, in a middleware file you can have one or more handlers, the simplest being like this:
// middleware/errors.js
module.exports = (err, req, res, next) => {
  console.error(err); // log it
  if (err.status) {
    return res.status(err.status).render(`errors/${err.status}`, { message: err.message });
  }
  return res.status(500).render('errors/500', { message: err.message });
};
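If you prefer the custom errors mentioned above over anonymous objects, a minimal sketch might look like this (the class name and file are my own, not part of the original answer); the middleware above handles it unchanged because it still carries status and message:
// errors/HttpError.js - hypothetical helper
class HttpError extends Error {
  constructor(status, message) {
    super(message);
    this.status = status; // read by the error-handling middleware
  }
}
module.exports = HttpError;

// usage in a controller:
// return next(new HttpError(404, 'Post not found'));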
Finally, in your routing setup you can do things like this:
const postController = require('../controllers/postController');
const errorHandler = require('../middleware/errors.js');

const postRouter = express.Router();
postRouter.param('postId', postController.populatePost);
postRouter.get('/:postId', postController.getOnePost);
// other methods and routes
app.use('/posts', postRouter);

// later
app.use(errorHandler);
As was pointed out in the comments, some folks prefer using the Promise syntax to callbacks. I don't personally find them that much cleaner, unless you also use the async/await syntax. As an example, if your db library supports promises, you can change the model code to look like so:
exports.getPostById = async (id) => {
  // again, this assumes db.query returns a Promise
  return await db.query('SELECT * FROM `posts` WHERE id=?', [id]);
};
Then your controller code would likewise need to change to handle that as well:
// postController.js
const model = require('../models/postModel.js'); // or whatever you named it

exports.populatePost = async (req, res, next, id) => {
  try {
    const post = await model.getPostById(id);
    req.post = post;
    return next();
  } catch (err) {
    return next(err);
  }
};

Request inside a nested request loop to another server

I need to request X products from another server and I need to wait for that execution to finish before proceeding and saving the order in the database.
Let's say I receive, via POST, an array of product IDs that I need to add to the order, e.g.:
JSON FILE:
{
  "order_products": [1, 2, 3, 4]
}
Here's a code sample:
//Express module
var router = require('express').Router();
//HTTP Request module
var client = require('request');
//Util that saves the URLs of the other databases
var productURL = require('../utils/product/productURL');
//Builds a product object given a JSON
var productBuilder = require('../utils/product/productBuilder');

router.post('/', function (req, res) {
  //Instantiate a new order
  var orderInstance = new order({
    date: Date.now()
  });

  //Query the products in the other server and add them to the order
  req.body.order_products.forEach(id => {
    client.get(productURL.HTTPS + id, { json: true }, (err, res, JSONProduct) => {
      var product = productBuilder.build(JSONProduct);
      orderInstance.order_products.push(product);
    });
  });

  //Save the order in the database
  orderInstance.save(....);

  //Send response
  res.status(201).json(orderInstance);
});
The problem here is that while the loop is still executing, the response is sent (201) and the orderInstance is saved without any products. If I console.log the products, they only appear after the orderInstance is saved.
I've tried implementing callbacks to fix this issue, but with no success. I'd appreciate it if anyone could lend me a hand here! Thanks in advance.
forEach runs synchronously - when the forEach ends, the client.get requests may have all been sent out, but the responses surely haven't come back yet. You need to convert each request into a Promise, and then call Promise.all on an array of those Promises. The Promise.all will resolve once all responses have come back. For example:
const allPromises = req.body.order_products.map(id => new Promise((resolve, reject) => {
  client.get(productURL.HTTPS + id, { json: true }, (err, res, JSONProduct) => {
    if (err) reject(err);
    else resolve(productBuilder.build(JSONProduct));
  });
}));

Promise.all(allPromises)
  .then((newProducts) => {
    orderInstance.order_products.push(...newProducts);
    // save orderInstance here, then respond
    res.status(201).json(orderInstance);
  })
  .catch((err) => {
    // handle errors
  });
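If your Node version supports async/await and you use a promise-returning HTTP client (for example request-promise instead of request), the same idea reads more linearly. This is only a sketch built on the question's variables, and it assumes orderInstance.save() returns a promise (as a Mongoose model would):
router.post('/', async function (req, res) {
  try {
    var orderInstance = new order({ date: Date.now() });

    // fire all product requests in parallel and wait for every response
    const products = await Promise.all(
      req.body.order_products.map(id =>
        client.get(productURL.HTTPS + id, { json: true })
          .then(JSONProduct => productBuilder.build(JSONProduct))
      )
    );

    orderInstance.order_products.push(...products);
    await orderInstance.save();
    res.status(201).json(orderInstance);
  } catch (err) {
    res.status(500).json(err);
  }
});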

Async confusion in nodejs function

I always have multiple operations in one route or endpoint. Take the example below: when a user deletes an item, I want the related file to be deleted from S3 too, besides deleting the related record from the database.
So is the code below OK? Does it matter if I put the first function (deleting the file from S3) inside the DeleteItem callback?
router.post('/item/delete', function(req, res) {
  if (req.body.dlt_item) {
    var tempArray = [];
    tempArray.push({ "Key": req.body.dlt_item });
    s3Bucket.deleteObjects({
      Bucket: 'myS3',
      Delete: {
        Objects: tempArray
      }
    }, function(err, data) {
      if (err)
        return console.log(err);
    });
  }
  Item.DeleteItem(req.body.item_id, function(err, result) {
    if (err) { console.log(err) }
    res.send({ result: 1 });
  })
});
You should organise your code like this. This will ensure that the S3 deletion only starts once the MongoDB deletion has finished.
In your code both things happen simultaneously, which may cause issues in some cases.
If one fails and the other succeeds, there will be trouble. Suppose the S3 files get deleted successfully and the Mongo deletion fails: then you will have many references to non-existing resources.
router.post('/item/delete', function(req, res) {
  if (req.body.dlt_item) {
    var tempArray = [];
    tempArray.push({ "Key": req.body.dlt_item });
    Item.DeleteItem(req.body.item_id, function(err, result) {
      if (err) {
        console.log(err);
        res.send(err);
      } else {
        // deletion from mongodb is successful, now delete from s3
        s3Bucket.deleteObjects({
          Bucket: 'myS3',
          Delete: {
            Objects: tempArray
          }
        }, function(err, data) {
          if (err) {
            // deletion from s3 failed, you should handle this case
            res.send({ result: 1 });
            return console.log(err);
          } else {
            // successful deletion from both s3 and mongo.
            // If you do not want to wait for this then send the response before this function.
            res.send({ result: 1 });
          }
        });
      }
    });
  }
});
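For comparison, the same ordering can be written with promises. This is a sketch only: it assumes Item.DeleteItem follows the standard (err, result) callback convention (so Node's util.promisify can wrap it) and uses the AWS SDK v2 .promise() helper:
const { promisify } = require('util');
// bind in case DeleteItem relies on `this` inside the model
const deleteItem = promisify(Item.DeleteItem.bind(Item));

router.post('/item/delete', async function (req, res) {
  try {
    // 1. delete from MongoDB first
    await deleteItem(req.body.item_id);

    // 2. only then delete the related object from S3
    if (req.body.dlt_item) {
      await s3Bucket.deleteObjects({
        Bucket: 'myS3',
        Delete: { Objects: [{ Key: req.body.dlt_item }] }
      }).promise();
    }

    res.send({ result: 1 });
  } catch (err) {
    console.log(err);
    res.status(500).send(err);
  }
});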

Index a MongoDB collection with JavaScript

I have a working script which stores the HTML forms I have created in MongoDB. It works awesome. However, I can't search any of the data I place in Mongo, because I don't have an index.
I realize I could create the index from the console, but for my system to work the way we need, I really need the index to be created when the data is stored. So, I need to place code in the JavaScript that actually creates the index (using Node.js or directly).
I tried the following JavaScript (with Node.js), but it does not appear to work.
app.post('/:db/:collection/formSubmit', function(req, res) {
  var json = form2json.transform(req.rawBody);
  var db = new mongo.Db(req.params.db, new mongo.Server(config.db.host, config.db.port, mongoOptions));
  db.open(function(err, db) {
    db.authenticate(config.db.username, config.db.password, function () {
      db.collection(req.params.collection, function(err, collection) {
        collection.insert(Array.isArray(json) ? json[0] : json, function(err, docs) {
          res.header('Content-Type', 'application/json');
          if (req.is('application/xml')) {
            res.send('<ok>1</ok>');
          } else {
            res.send(json, 201);
          }
          // my attempt to create an index while posting a form follows
          db.core.ensureIndex({ "document": 1 });
          db.close();
        });
      });
    });
  });
});
You need to call ensureIndex on the collection:
collection.ensureIndex({ "document": 1 }, function (err, indexName) {
  db.close();
});
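Note that newer versions of the MongoDB Node.js driver deprecate ensureIndex in favour of createIndex; the equivalent call for the same index spec would be:
collection.createIndex({ "document": 1 }, function (err, indexName) {
  if (err) console.log(err);
  db.close();
});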

Node API design and re-using code

My API has three endpoints: articles, websites, and users. Each article is associated with a website. A user can also share articles.
In my API, I have just created an endpoint at /website/:id/articles. This will query the database for articles associated with the given website. It then performs some manipulation on the data for each article based on who is talking to the API ("has the user shared this article?", for example).
I am now moving on to create a similar endpoint at /users/:id/shared-articles. The database query for this is slightly different, but the manipulation I want to perform on the articles data following the query is the same as before.
Here is some pseudo code for the former endpoint:
router.get('/websites/:id/articles', function (req, res) {
  articleService.find({ websiteId: req.params.id }, function (error, foundArticles) {
    async.waterfall([
      function (cb) {
        // Manipulate foundArticles…
        cb(null, manipulatedArticles)
      },
      function (articles, cb) {
        // Manipulate articles some more…
        cb(null, manipulatedArticles)
      },
    ], function (error, articles) {
      if (error) {
        return res.json(error, 400)
      }
      res.json(articles)
    })
  })
})
To create my new endpoint, /users/:id/shared-articles, I could abstract the manipulation tasks into a function that can be shared by both of my endpoints (the waterfall seen above), reducing code repetition.
router.get('/websites/:id/articles', function (req, res) {
  articleService.find({ websiteId: req.params.id }, function (error, foundArticles) {
    manipulateArticles(foundArticles, function (articles) {
      if (error) {
        return res.json(error, 400)
      }
      res.json(articles)
    })
  })
})

router.get('/users/:id/shared-articles', function (req, res) {
  shareActionService.find({ userId: req.params.id }, function (error, foundShareActions) {
    var sharedArticleIds = { _id: { $in: _.pluck(foundShareActions, 'sharedArticleId') } }
    articleService.find(sharedArticleIds, function (error, foundArticles) {
      manipulateArticles(foundArticles, function (articles) {
        if (error) {
          return res.json(error, 400)
        }
        res.json(articles)
      })
    })
  })
})
However, I figured that this sort of code re-use problem must be common when designing APIs in Node, and I would like to know if there is an obviously better solution that I am missing here.
One idea I had would be to have all article sub-resources (such as /users/:id/shared-articles or /websites/:id/links) talk to the /links API internally, which itself would deal with the manipulation I mention above. The problem then is that I would have to make /links very verbose in the query headers/parameters it needs, in order to allow for the different database queries needed (such as those by the two sub-resource endpoints demonstrated here).
Is there a better solution/abstraction here?
You can create a "service" layer. Abstract the link manipulation into a completely separate file and call it from each of the routes.
Create a service/links.js:
module.exports = {
  manipulateLinks: function (response) {
    // Manipulate code
    return response
  }
}
Then in your routes, call the function:
var linkservice = require('../service/links')
var response = linkservice.manipulateLinks(response)
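Because the route handlers earlier call manipulateArticles(foundArticles, callback), a callback-style variant of the service slots straight into them. A sketch, with the actual manipulation left as a placeholder:
// service/articles.js - callback-style sketch to match the routes above
module.exports = {
  manipulateArticles: function (foundArticles, cb) {
    // perform the per-article manipulation here (e.g. the waterfall steps above),
    // then hand the result back to the route
    var manipulatedArticles = foundArticles
    cb(manipulatedArticles)
  }
}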
