Populate Only When Conditions Are Met - javascript

I have a MongoDB database and I use Mongoose with Node.js.
I need to return data from the following query, populating "tabela_tuss" only when the field "temtussvinculado" is true.
Here is what I am doing:
ConvProced.find({ 'convenioId': new ObjectId(req.params.id) })
  .populate('convenioId')
  .populate({
    path: 'procedId',
    populate: {
      path: 'tabela_tuss',
      match: { 'procedId.temtussvinculado': true }
    }
  })
  .exec((err, data) => {
    callback(err, data, res)
  })
My problem is that the match on "procedId.temtussvinculado: true" has no effect and "tabela_tuss" is never populated.
What am I doing wrong?
Here are my schemas:
////
var conveniosSchema = new mongoose.Schema({
  nome: { type: String, unique: true },
  ativo: { type: Boolean }
});
module.exports = mongoose.model('Convenio', conveniosSchema, 'convenios');
////
////
const agProcedimentosSchema = new mongoose.Schema({
  ativo: { type: Boolean },
  temtussvinculado: { type: Boolean },
  tabela_tuss: { type: mongoose.Schema.Types.ObjectId, ref: 'Tuss_22' }
});
module.exports = mongoose.model('Ag_procedimento', agProcedimentosSchema, 'ag_procedimentos');
////
////
const tuss_22Schema = new mongoose.Schema({
  codigo: { type: String, unique: true },
  descricao: { type: String },
  tabela: { type: String }
});
module.exports = mongoose.model('Tuss_22', tuss_22Schema, 'tuss_22');
////
// ../models/convenioprocedimento
var conveniosProcedsSchema = new mongoose.Schema({
  convenioId: { type: mongoose.Schema.Types.ObjectId, ref: 'Convenio' },
  procedId: { type: mongoose.Schema.Types.ObjectId, ref: 'Ag_procedimento' },
  valor_particular: { type: Number },
  valor_convenio: { type: Number },
});
module.exports = mongoose.model('ConvenioProcedimento', conveniosProcedsSchema, 'conveniosprocedimentos');
// my query:
const ConvProced = require('../models/convenioprocedimento');
ConvProced.find({ 'convenioId': new ObjectId(req.params.id) })
  .populate('convenioId')
  .populate({
    path: 'procedId',
    populate: {
      path: 'tabela_tuss',
      match: { 'procedId.temtussvinculado': true }
    }
  })
  .exec((err, data) => {
    callback(err, data, res)
  })

What you are actually asking here is to "only populate where a condition within the data says to do so", which is not a directly supported action of .populate() or of the "nested populate" syntax.
So if you want to impose "conditions" on which items are actually populated or not, then you must handle the populate calls "manually".
The basic premise in your case is that you need to inspect the value returned from the "initial" top level .populate() call, and then "only" call the "inner" populate when the given conditions actually allow it.
So your code should probably look like this, using Promises with Promise.all(), where you basically "loop" or .map() each query result and test procedId.temtussvinculado to see if it is true/false; where true we actually issue a Model.populate() call, otherwise we just return the data in its present state:
ConvProced.find({ 'convenioId': new ObjectId(req.params.id) })
  .populate('convenioId procedId')
  .exec()
  .then(data =>
    Promise.all(
      data.map(d =>
        ( d.procedId.temtussvinculado )
          ? mongoose.model('Tuss_22').populate(d, { path: 'procedId.tabela_tuss' })
          : d
      )
    )
  )
  // Populated conditionally
  .then(data => {
    // Do something with data
  })
  .catch(err => console.error(err)); // or something else with error
There are different options available other than Promises, but this is the no-dependency option. An alternative such as async.map does much the same thing, but it is an additional dependency if you do not already have it:
ConvProced.find({ 'convenioId': new ObjectId(req.params.id) })
  .populate('convenioId procedId')
  .exec((err, data) => {
    if (err) throw err;
    async.map(data, (d, callback) =>
      ( d.procedId.temtussvinculado )
        ? mongoose.model('Tuss_22').populate(d, { path: 'procedId.tabela_tuss' }, callback)
        : callback(null, d),
      (err, data) => {
        if (err) throw err; // or something
        // Conditionally populated
      }
    )
  })
This is also demonstrated with a full working example, which is actually a little more complicated than what you need to do, since the "condition" in this example is nested within another array:
const async = require('async'),
      mongoose = require('mongoose'),
      Schema = mongoose.Schema;

mongoose.Promise = global.Promise;
mongoose.set('debug',true);
mongoose.connect('mongodb://localhost/test');

const subInnerSchema = new Schema({
  label: String
});

const innerSchema = new Schema({
  name: String,
  populate: Boolean,
  subs: [{ type: Schema.Types.ObjectId, ref: 'Sub' }]
});

const outerSchema = new Schema({
  title: String,
  inners: [{ type: Schema.Types.ObjectId, ref: 'Inner' }]
});

const Sub = mongoose.model('Sub', subInnerSchema);
const Inner = mongoose.model('Inner', innerSchema);
const Outer = mongoose.model('Outer', outerSchema);

function log(data) {
  console.log(JSON.stringify(data, undefined, 2))
}

async.series(
  [
    // Clean data
    (callback) =>
      async.each(mongoose.models,(model,callback) =>
        model.remove({},callback),callback),

    // Insert some data
    (callback) =>
      async.waterfall(
        [
          (callback) =>
            Sub.create([1,2,3,4].map( label => ({ label })),callback),
          (subs,callback) =>
            Inner.create(
              [0,2].map(x => subs.slice(x,x+2))
                .map((el,i) => ({
                  name: i+i,
                  populate: i == 1,
                  subs: el
                })),
              callback
            ),
          (inners,callback) =>
            Outer.create(
              inners.map((inner,i) => ({
                title: i+1,
                inners: [inner]
              })),
              callback
            ),
        ],
        callback
      ),

    // Conditional populate async.map version
    (callback) =>
      Outer.find().populate('inners').exec((err,outers) => {
        if (err) callback(err);
        async.map(
          outers,
          (outer,callback) =>
            async.map(
              outer.inners,
              (inner,callback) =>
                (inner.populate)
                  ? Inner.populate(inner,{ path: 'subs' },callback)
                  : callback(null,inner),
              (err,inners) => {
                if (err) callback(err);
                outer.inners = inners
                callback(null,outer);
              }
            ),
          (err,outers) => {
            if (err) callback(err);
            log(outers);
            callback();
          }
        );
      }),

    // Conditional populate Promise
    (callback) =>
      Outer.find().populate('inners').exec()
        .then(outers =>
          Promise.all(
            outers.map( outer =>
              new Promise((resolve,reject) => {
                Promise.all(
                  outer.inners.map( inner =>
                    (inner.populate)
                      ? Inner.populate(inner,{ path: 'subs' })
                      : inner
                  )
                ).then(inners => {
                  outer.inners = inners;
                  resolve(outer)
                })
                .catch(reject)
              })
            )
          )
        )
        .then(outers => {
          log(outers);
          callback();
        })
        .catch(err => callback(err))
  ],
  (err) => {
    if (err) throw err;
    mongoose.disconnect();
  }
);
This produces output showing the "conditional" selection, from either approach of course:
Mongoose: subs.remove({}, {})
Mongoose: inners.remove({}, {})
Mongoose: outers.remove({}, {})
Mongoose: subs.insert({ label: '1', _id: ObjectId("5961830256bf9e2d0fcf13b3"), __v: 0 })
Mongoose: subs.insert({ label: '2', _id: ObjectId("5961830256bf9e2d0fcf13b4"), __v: 0 })
Mongoose: subs.insert({ label: '3', _id: ObjectId("5961830256bf9e2d0fcf13b5"), __v: 0 })
Mongoose: subs.insert({ label: '4', _id: ObjectId("5961830256bf9e2d0fcf13b6"), __v: 0 })
Mongoose: inners.insert({ name: '0', populate: false, _id: ObjectId("5961830256bf9e2d0fcf13b7"), subs: [ ObjectId("5961830256bf9e2d0fcf13b3"), ObjectId("5961830256bf9e2d0fcf13b4") ], __v: 0 })
Mongoose: inners.insert({ name: '2', populate: true, _id: ObjectId("5961830256bf9e2d0fcf13b8"), subs: [ ObjectId("5961830256bf9e2d0fcf13b5"), ObjectId("5961830256bf9e2d0fcf13b6") ], __v: 0 })
Mongoose: outers.insert({ title: '1', _id: ObjectId("5961830256bf9e2d0fcf13b9"), inners: [ ObjectId("5961830256bf9e2d0fcf13b7") ], __v: 0 })
Mongoose: outers.insert({ title: '2', _id: ObjectId("5961830256bf9e2d0fcf13ba"), inners: [ ObjectId("5961830256bf9e2d0fcf13b8") ], __v: 0 })
Mongoose: outers.find({}, { fields: {} })
Mongoose: inners.find({ _id: { '$in': [ ObjectId("5961830256bf9e2d0fcf13b7"), ObjectId("5961830256bf9e2d0fcf13b8") ] } }, { fields: {} })
Mongoose: subs.find({ _id: { '$in': [ ObjectId("5961830256bf9e2d0fcf13b5"), ObjectId("5961830256bf9e2d0fcf13b6") ] } }, { fields: {} })
[
{
"_id": "5961830256bf9e2d0fcf13b9",
"title": "1",
"__v": 0,
"inners": [
{
"_id": "5961830256bf9e2d0fcf13b7",
"name": "0",
"populate": false,
"__v": 0,
"subs": [
"5961830256bf9e2d0fcf13b3",
"5961830256bf9e2d0fcf13b4"
]
}
]
},
{
"_id": "5961830256bf9e2d0fcf13ba",
"title": "2",
"__v": 0,
"inners": [
{
"_id": "5961830256bf9e2d0fcf13b8",
"name": "2",
"populate": true,
"__v": 0,
"subs": [
{
"_id": "5961830256bf9e2d0fcf13b5",
"label": "3",
"__v": 0
},
{
"_id": "5961830256bf9e2d0fcf13b6",
"label": "4",
"__v": 0
}
]
}
]
}
]
Mongoose: outers.find({}, { fields: {} })
Mongoose: inners.find({ _id: { '$in': [ ObjectId("5961830256bf9e2d0fcf13b7"), ObjectId("5961830256bf9e2d0fcf13b8") ] } }, { fields: {} })
Mongoose: subs.find({ _id: { '$in': [ ObjectId("5961830256bf9e2d0fcf13b5"), ObjectId("5961830256bf9e2d0fcf13b6") ] } }, { fields: {} })
[
{
"_id": "5961830256bf9e2d0fcf13b9",
"title": "1",
"__v": 0,
"inners": [
{
"_id": "5961830256bf9e2d0fcf13b7",
"name": "0",
"populate": false,
"__v": 0,
"subs": [
"5961830256bf9e2d0fcf13b3",
"5961830256bf9e2d0fcf13b4"
]
}
]
},
{
"_id": "5961830256bf9e2d0fcf13ba",
"title": "2",
"__v": 0,
"inners": [
{
"_id": "5961830256bf9e2d0fcf13b8",
"name": "2",
"populate": true,
"__v": 0,
"subs": [
{
"_id": "5961830256bf9e2d0fcf13b5",
"label": "3",
"__v": 0
},
{
"_id": "5961830256bf9e2d0fcf13b6",
"label": "4",
"__v": 0
}
]
}
]
}
]
So you can see there that, in much the same way as your case, a "boolean" field is being tested to determine whether to perform a .populate() call or just return the plain data instead.
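If you prefer async/await, the same pattern applied to your own models could look like the sketch below. This has not been run against your data, and findWithConditionalTuss is just a hypothetical helper name for illustration:
// A minimal async/await sketch of the same conditional populate pattern,
// using the ConvProced / Ag_procedimento / Tuss_22 models from the question.
// findWithConditionalTuss is a hypothetical helper name.
async function findWithConditionalTuss(convenioId) {
  const data = await ConvProced.find({ convenioId })
    .populate('convenioId procedId')
    .exec();

  // Only documents whose procedId.temtussvinculado is true get the extra populate call
  return Promise.all(
    data.map(d =>
      d.procedId && d.procedId.temtussvinculado
        ? mongoose.model('Tuss_22').populate(d, { path: 'procedId.tabela_tuss' })
        : d
    )
  );
}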

Related

Why is insertMany not working using Mongoose with transactions?

I am trying to insert data using insertMany, but I am not able to insert the data. Why? I am using a Mongoose session so that if any error occurs I can roll back the changes.
https://codesandbox.io/s/dreamy-bell-9u0bz
app.get("/saveData", async (req, res, next) => {
const session = await mongoose.startSession();
session.startTransaction();
try {
const data = [
{
empid: "Ad",
id: 4,
date: "19-Jul-2019"
},
{
empid: "Bc",
id: 56,
date: "18-Jul-2019"
},
{
empid: "C",
id: 6,
date: "11-Jul-2019"
}
];
console.log("before save");
let saveBlog = await BlogPostModel.insertMany(data, { session }); // when it fails it goes to catch
await session.commitTransaction();
return res.send(saveBlog);
} catch (error) {
console.log(error);
await session.abortTransaction();
return res.status(400).send(error);
}
});
Since you don't appear to have understood the marked duplicate or the comment on your last question, here's a direct demonstration:
const { Schema } = mongoose = require('mongoose');
const uri = 'mongodb://localhost:27017/test';
const opts = { useNewUrlParser: true, useUnifiedTopology: true };
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.set('useCreateIndex', true);
mongoose.set('useFindAndModify', false);
const blogPostSchema = new Schema({
id: { type: Number, unique: true },
empid: String,
date: Date
});
const BlogPost = mongoose.model('BlogPost', blogPostSchema);
const sampleData = [
{ empid: "test13", id: 6, date: '11-Jul-2019' },
{ empid: "test123", id: 4, date: '19-Jul-2019' },
{ empid: "test13", id: 4, date: '18-Jul-2019' }
];
const log = data => console.log(JSON.stringify(data, undefined, 2));
(async function() {
try {
const conn = await mongoose.connect(uri, opts);
// Clean data
await Promise.all(
Object.values(conn.models).map(m => m.deleteMany())
);
// Collections must exist in transactions
await Promise.all(
Object.values(conn.models).map(m => m.createCollection())
);
// With Transaction
log("With Transaction");
let session = await conn.startSession();
session.startTransaction();
try {
await BlogPost.insertMany(sampleData, { session });
await session.commitTransaction();
} catch(e) {
// Show the error and abort
log({ err: e.errmsg, result: e.result.result.writeErrors });
await session.abortTransaction();
}
log({ results: (await BlogPost.find()) });
// No transaction
log("Without Transaction");
try {
await BlogPost.insertMany(sampleData);
} catch(e) {
// Show the error
log({ err: e.errmsg, result: e.result.result.writeErrors });
}
log({ results: (await BlogPost.find()) });
} catch (e) {
console.error(e);
} finally {
mongoose.disconnect();
}
})();
And the output:
Mongoose: blogposts.createIndex({ id: 1 }, { unique: true, background: true })
Mongoose: blogposts.deleteMany({}, {})
"With Transaction"
Mongoose: blogposts.insertMany([ { _id: 5d8f28ac462a1e1a8c6838a2, empid: 'test13', id: 6, date: 2019-07-10T14:00:00.000Z, __v: 0 }, { _id: 5d8f28ac462a1e1a8c6838a3, empid: 'test123', id: 4, date: 2019-07-18T14:00:00.000Z, __v: 0 }, { _id: 5d8f28ac462a1e1a8c6838a4, empid: 'test13', id: 4, date: 2019-07-17T14:00:00.000Z, __v: 0 } ], { session: ClientSession("650da06d23544ef8bc1d345d93331d1e") })
{
"err": "E11000 duplicate key error collection: test.blogposts index: id_1 dup key: { id: 4 }",
"result": [
{
"code": 11000,
"index": 2,
"errmsg": "E11000 duplicate key error collection: test.blogposts index: id_1 dup key: { id: 4 }",
"op": {
"_id": "5d8f28ac462a1e1a8c6838a4",
"empid": "test13",
"id": 4,
"date": "2019-07-17T14:00:00.000Z",
"__v": 0
}
}
]
}
Mongoose: blogposts.find({}, { projection: {} })
{
"results": []
}
"Without Transaction"
Mongoose: blogposts.insertMany([ { _id: 5d8f28ac462a1e1a8c6838a5, empid: 'test13', id: 6, date: 2019-07-10T14:00:00.000Z, __v: 0 }, { _id: 5d8f28ac462a1e1a8c6838a6, empid: 'test123', id: 4, date: 2019-07-18T14:00:00.000Z, __v: 0 }, { _id: 5d8f28ac462a1e1a8c6838a7, empid: 'test13', id: 4, date: 2019-07-17T14:00:00.000Z, __v: 0 } ], {})
{
"err": "E11000 duplicate key error collection: test.blogposts index: id_1 dup key: { id: 4 }",
"result": [
{
"code": 11000,
"index": 2,
"errmsg": "E11000 duplicate key error collection: test.blogposts index: id_1 dup key: { id: 4 }",
"op": {
"_id": "5d8f28ac462a1e1a8c6838a7",
"empid": "test13",
"id": 4,
"date": "2019-07-17T14:00:00.000Z",
"__v": 0
}
}
]
}
Mongoose: blogposts.find({}, { projection: {} })
{
"results": [
{
"_id": "5d8f28ac462a1e1a8c6838a5",
"empid": "test13",
"id": 6,
"date": "2019-07-10T14:00:00.000Z",
"__v": 0
},
{
"_id": "5d8f28ac462a1e1a8c6838a6",
"empid": "test123",
"id": 4,
"date": "2019-07-18T14:00:00.000Z",
"__v": 0
}
]
}
Note that when the transaction is in use there are no items inserted into the collection. Using insertMany() with the default behavior of ordered: true will insert all batched items up until the point where any error is encountered.
Note also, as stated, that since you are indeed expecting an error you must include such a statement in its very own try..catch or similar error handler. Otherwise any error ( which is expected in the example case ) would simply fall through to the outer catch, which of course in the demonstration simply exits the program.
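If partial inserts are acceptable outside of a transaction, passing ordered: false keeps going past a failed document rather than stopping the batch. A sketch of that variation on the demonstration above; the error reporting follows the same pattern as the earlier catch block and its exact shape may vary between driver/mongoose versions:
// Sketch only: with { ordered: false } the duplicate document is still rejected,
// but the remaining valid documents in the batch are inserted instead of the
// write stopping at the first error.
try {
  await BlogPost.insertMany(sampleData, { ordered: false });
} catch(e) {
  // Same reporting pattern as the demonstration above; exact error shape may differ by version
  log({ err: e.errmsg, result: e.result.result.writeErrors });
}
log({ results: (await BlogPost.find()) });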
Not actually in the question itself, but something also not mentioned in the demonstrations of How to use MongoDB transaction using Mongoose?, is that you should be aware that whilst a transaction is active you must also include the session attribute on any subsequent reads in order to see the changes made within that transaction.
For instance, the following would show no content in a collection:
let session = await conn.startSession();
session.startTransaction();
try {
await BlogPost.insertMany(sampleData, { session });
let documents = await BlogPost.find(); // This would return nothing
await session.commitTransaction();
} catch(e) {
// Show the error and abort
log({ err: e.errmsg, result: e.result.result.writeErrors });
await session.abortTransaction();
}
However including the session within a find() will actually show what is inserted:
try {
await BlogPost.insertMany(sampleData, { session });
// Actually includes the session and therefore the state
let documents = await BlogPost.find({},{ session });
await session.commitTransaction();
} catch(e) {
// Show the error and abort
log({ err: e.errmsg, result: e.result.result.writeErrors });
await session.abortTransaction();
}
And of course that read would in this case be dependent on the insertMany() not failing for any reason, since any error would result in exiting to the catch before the next request was made.
Once a transaction is committed, it is of course available to the global state of the connection. But whilst in progress only operations which include the same session information on which the transaction was started will have visibility of any changes implemented within that transaction.
For anyone who gets the error "Cannot read property 'map' of undefined" while passing session as an option to insertMany: this error comes up because your MongoDB is running as a standalone server. To fix it you can use the npm package run-rs to run a local replica set, or follow this answer: https://stackoverflow.com/a/60603587/9611273

Sequelize many-to-many self association

I am trying to create a model Users with a many-to-many association to itself to allow users to follow other users. In one query I want to retrieve the Users followed by the current user; in another query I want to retrieve the people who follow the current user.
This is my Users model:
module.exports = (sequelize, Sequelize) => {
const Users = sequelize.define(
'Users',
{
id: {
type: Sequelize.INTEGER,
autoIncrement: true,
primaryKey: true,
},
name: {
type: Sequelize.STRING,
},
},
);
Users.associate = function(models) {
Users.belongsToMany(Users, { as: 'following', through: models.UsersUsers });
};
return Users;
};
I declare UsersUsers, just in case I need to add any field there:
module.exports = (sequelize, Sequelize) => {
const UsersUsers = sequelize.define(
'UsersUsers',
{}
);
UsersUsers.associate = function(models) {};
return UsersUsers;
};
Then I query Users as:
models.Users.findOne({
where: {
id: req.params.id,
},
include: [
{
model: models.Users,
as: 'following',
},
],
})
.then((results) => {
return res.send({
User: results,
});
})
.catch((error) => {
return res.send(String(error));
});
And I get this result:
{
"User": {
"id": 1,
"name": "User1",
"following": [
{
"id": 2,
"name": "User2",
"UsersUsers": {
"UserId": 1,
"followingId": 2
}
},
{
"id": 3,
"name": "User3",
"UsersUsers": {
"UserId": 1,
"followingId": 3
}
},
{
"id": 4,
"name": "User4",
"UsersUsers": {
"UserId": 1,
"followingId": 4
}
}
]
}
}
Now the questions:
In my current query, how do I exclude "UsersUsers" from the result? attributes: { exclude: ['UsersUsers'] } did not work…
How do I create a query to retrieve the current user with the users that follow him instead of the users followed by him?
Thanks!
--
EDIT:
The solution for question 1 is to add through: { attributes: [] } to the included model:
models.Users.findOne({
where: {
id: req.params.id,
},
include: [
{
model: models.Users,
as: 'following',
through: {
attributes: [],
},
},
],
})
.then((results) => {
return res.send({
User: results,
});
})
.catch((error) => {
return res.send(String(error));
});
Still pending question 2!
Users.findAll({
include: [
{
model: models.Users,
as: 'following',
through: {
attributes: [],
},
},
],
where : {
id : [connection.literal(` write raw sql query to get followingId here`)]
}
})
.then(result => {
res.json(result);
}).catch(error=>{
res.json(error);
});
I'm not sure if this is going to work; I'm still playing around with it, so do let me know if it worked or if you found another solution.
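For question 2, one common approach (a sketch only, not verified against this exact setup) is to declare a second belongsToMany over the same join table with the keys reversed, so it resolves the users who follow the current user:
// Sketch: reverse association through the same UsersUsers join table.
// Key names follow the join rows shown above (UserId / followingId);
// adjust them if your actual column names differ.
Users.associate = function (models) {
  // existing association: users this user follows
  Users.belongsToMany(Users, {
    as: 'following',
    through: models.UsersUsers,
    foreignKey: 'UserId',
    otherKey: 'followingId',
  });
  // reverse association: users who follow this user
  Users.belongsToMany(Users, {
    as: 'followers',
    through: models.UsersUsers,
    foreignKey: 'followingId',
    otherKey: 'UserId',
  });
};
// The query then mirrors the 'following' one:
models.Users.findOne({
  where: { id: req.params.id },
  include: [{ model: models.Users, as: 'followers', through: { attributes: [] } }],
});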

Mongoose get collection, where ref [duplicate]

I'm pretty new to Mongoose and MongoDB in general so I'm having a difficult time figuring out if something like this is possible:
Item = new Schema({
id: Schema.ObjectId,
dateCreated: { type: Date, default: Date.now },
title: { type: String, default: 'No Title' },
description: { type: String, default: 'No Description' },
tags: [ { type: Schema.ObjectId, ref: 'ItemTag' }]
});
ItemTag = new Schema({
id: Schema.ObjectId,
tagId: { type: Schema.ObjectId, ref: 'Tag' },
tagName: { type: String }
});
var query = Models.Item.find({});
query
.desc('dateCreated')
.populate('tags')
.where('tags.tagName').in(['funny', 'politics'])
.run(function(err, docs){
// docs is always empty
});
Is there a better way to do this?
Edit
Apologies for any confusion. What I'm trying to do is get all Items that contain either the funny tag or politics tag.
Edit
Document without where clause:
[{
_id: 4fe90264e5caa33f04000012,
dislikes: 0,
likes: 0,
source: '/uploads/loldog.jpg',
comments: [],
tags: [{
itemId: 4fe90264e5caa33f04000012,
tagName: 'movies',
tagId: 4fe64219007e20e644000007,
_id: 4fe90270e5caa33f04000015,
dateCreated: Tue, 26 Jun 2012 00:29:36 GMT,
rating: 0,
dislikes: 0,
likes: 0
},
{
itemId: 4fe90264e5caa33f04000012,
tagName: 'funny',
tagId: 4fe64219007e20e644000002,
_id: 4fe90270e5caa33f04000017,
dateCreated: Tue, 26 Jun 2012 00:29:36 GMT,
rating: 0,
dislikes: 0,
likes: 0
}],
viewCount: 0,
rating: 0,
type: 'image',
description: null,
title: 'dogggg',
dateCreated: Tue, 26 Jun 2012 00:29:24 GMT
}, ... ]
With the where clause, I get an empty array.
With a modern MongoDB greater than 3.2 you can use $lookup as an alternate to .populate() in most cases. This also has the advantage of actually doing the join "on the server" as opposed to what .populate() does which is actually "multiple queries" to "emulate" a join.
So .populate() is not really a "join" in the sense of how a relational database does it. The $lookup operator on the other hand, actually does the work on the server, and is more or less analogous to a "LEFT JOIN":
Item.aggregate(
[
{ "$lookup": {
"from": ItemTags.collection.name,
"localField": "tags",
"foreignField": "_id",
"as": "tags"
}},
{ "$unwind": "$tags" },
{ "$match": { "tags.tagName": { "$in": [ "funny", "politics" ] } } },
{ "$group": {
"_id": "$_id",
"dateCreated": { "$first": "$dateCreated" },
"title": { "$first": "$title" },
"description": { "$first": "$description" },
"tags": { "$push": "$tags" }
}}
],
function(err, result) {
// "tags" is now filtered by condition and "joined"
}
)
N.B. The .collection.name here actually evaluates to the "string" that is the actual name of the MongoDB collection as assigned to the model. Since mongoose "pluralizes" collection names by default and $lookup needs the actual MongoDB collection name as an argument ( since it's a server operation ), then this is a handy trick to use in mongoose code, as opposed to "hard coding" the collection name directly.
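As a quick illustration of that point, assuming the default pluralization and no explicit collection name passed to mongoose.model():
// mongoose lowercases and pluralizes the model name to derive the collection name
const ItemTag = mongoose.model('ItemTag', itemTagSchema);
console.log(ItemTag.collection.name); // "itemtags"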
Whilst we could also use $filter on arrays to remove the unwanted items, this is actually the most efficient form due to Aggregation Pipeline Optimization for the special condition of a $lookup followed by both an $unwind and a $match condition.
This actually results in the three pipeline stages being rolled into one:
{ "$lookup" : {
"from" : "itemtags",
"as" : "tags",
"localField" : "tags",
"foreignField" : "_id",
"unwinding" : {
"preserveNullAndEmptyArrays" : false
},
"matching" : {
"tagName" : {
"$in" : [
"funny",
"politics"
]
}
}
}}
This is highly optimal as the actual operation "filters the collection to join first", then it returns the results and "unwinds" the array. Both methods are employed so the results do not break the BSON limit of 16MB, which is a constraint that the client does not have.
The only problem is that it seems "counter-intuitive" in some ways, particularly when you want the results in an array, but that is what the $group is for here, as it reconstructs to the original document form.
It's also unfortunate that we simply cannot at this time actually write $lookup in the same eventual syntax the server uses. IMHO, this is an oversight to be corrected. But for now, simply using the sequence will work and is the most viable option with the best performance and scalability.
Addendum - MongoDB 3.6 and upwards
Though the pattern shown here is fairly optimized due to how the other stages get rolled into the $lookup, it does have one failing in that the "LEFT JOIN" which is normally inherent to both $lookup and the actions of populate() is negated by the "optimal" usage of $unwind here which does not preserve empty arrays. You can add the preserveNullAndEmptyArrays option, but this negates the "optimized" sequence described above and essentially leaves all three stages intact which would normally be combined in the optimization.
MongoDB 3.6 expands this with a "more expressive" form of $lookup allowing a "sub-pipeline" expression, which not only meets the goal of retaining the "LEFT JOIN" but still allows an optimal query to reduce the results returned, with a much simplified syntax:
Item.aggregate([
{ "$lookup": {
"from": ItemTags.collection.name,
"let": { "tags": "$tags" },
"pipeline": [
{ "$match": {
"tags": { "$in": [ "politics", "funny" ] },
"$expr": { "$in": [ "$_id", "$$tags" ] }
}}
]
}}
])
The $expr used in order to match the declared "local" value with the "foreign" value is actually what MongoDB does "internally" now with the original $lookup syntax. By expressing in this form we can tailor the initial $match expression within the "sub-pipeline" ourselves.
In fact, as a true "aggregation pipeline" you can do just about anything you can do with an aggregation pipeline within this "sub-pipeline" expression, including "nesting" the levels of $lookup to other related collections.
Further usage is a bit beyond the scope of what the question here asks, but in relation to even "nested population" the new usage pattern of $lookup allows this to be much the same, and a "lot" more powerful in its full usage.
Working Example
The following gives an example using a static method on the model. Once that static method is implemented the call simply becomes:
Item.lookup(
{
path: 'tags',
query: { 'tags.tagName' : { '$in': [ 'funny', 'politics' ] } }
},
callback
)
Or enhancing to be a bit more modern even becomes:
let results = await Item.lookup({
path: 'tags',
query: { 'tagName' : { '$in': [ 'funny', 'politics' ] } }
})
Making it very similar to .populate() in structure, but it's actually doing the join on the server instead. For completeness, the usage here casts the returned data back to mongoose document instances according to both the parent and child cases.
It's fairly trivial and easy to adapt or just use as is for most common cases.
N.B. The use of async here is just for brevity of running the enclosed example. The actual implementation is free of this dependency.
const async = require('async'),
mongoose = require('mongoose'),
Schema = mongoose.Schema;
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.connect('mongodb://localhost/looktest');
const itemTagSchema = new Schema({
tagName: String
});
const itemSchema = new Schema({
dateCreated: { type: Date, default: Date.now },
title: String,
description: String,
tags: [{ type: Schema.Types.ObjectId, ref: 'ItemTag' }]
});
itemSchema.statics.lookup = function(opt,callback) {
let rel =
mongoose.model(this.schema.path(opt.path).caster.options.ref);
let group = { "$group": { } };
this.schema.eachPath(p =>
group.$group[p] = (p === "_id") ? "$_id" :
(p === opt.path) ? { "$push": `$${p}` } : { "$first": `$${p}` });
let pipeline = [
{ "$lookup": {
"from": rel.collection.name,
"as": opt.path,
"localField": opt.path,
"foreignField": "_id"
}},
{ "$unwind": `$${opt.path}` },
{ "$match": opt.query },
group
];
this.aggregate(pipeline,(err,result) => {
if (err) callback(err);
result = result.map(m => {
m[opt.path] = m[opt.path].map(r => rel(r));
return this(m);
});
callback(err,result);
});
}
const Item = mongoose.model('Item', itemSchema);
const ItemTag = mongoose.model('ItemTag', itemTagSchema);
function log(body) {
console.log(JSON.stringify(body, undefined, 2))
}
async.series(
[
// Clean data
(callback) => async.each(mongoose.models,(model,callback) =>
model.remove({},callback),callback),
// Create tags and items
(callback) =>
async.waterfall(
[
(callback) =>
ItemTag.create([{ "tagName": "movies" }, { "tagName": "funny" }],
callback),
(tags, callback) =>
Item.create({ "title": "Something","description": "An item",
"tags": tags },callback)
],
callback
),
// Query with our static
(callback) =>
Item.lookup(
{
path: 'tags',
query: { 'tags.tagName' : { '$in': [ 'funny', 'politics' ] } }
},
callback
)
],
(err,results) => {
if (err) throw err;
let result = results.pop();
log(result);
mongoose.disconnect();
}
)
Or a little more modern for Node 8.x and above with async/await and no additional dependencies:
const { Schema } = mongoose = require('mongoose');
const uri = 'mongodb://localhost/looktest';
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const itemTagSchema = new Schema({
tagName: String
});
const itemSchema = new Schema({
dateCreated: { type: Date, default: Date.now },
title: String,
description: String,
tags: [{ type: Schema.Types.ObjectId, ref: 'ItemTag' }]
});
itemSchema.statics.lookup = function(opt) {
let rel =
mongoose.model(this.schema.path(opt.path).caster.options.ref);
let group = { "$group": { } };
this.schema.eachPath(p =>
group.$group[p] = (p === "_id") ? "$_id" :
(p === opt.path) ? { "$push": `$${p}` } : { "$first": `$${p}` });
let pipeline = [
{ "$lookup": {
"from": rel.collection.name,
"as": opt.path,
"localField": opt.path,
"foreignField": "_id"
}},
{ "$unwind": `$${opt.path}` },
{ "$match": opt.query },
group
];
return this.aggregate(pipeline).exec().then(r => r.map(m =>
this({ ...m, [opt.path]: m[opt.path].map(r => rel(r)) })
));
}
const Item = mongoose.model('Item', itemSchema);
const ItemTag = mongoose.model('ItemTag', itemTagSchema);
const log = body => console.log(JSON.stringify(body, undefined, 2));
(async function() {
try {
const conn = await mongoose.connect(uri);
// Clean data
await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));
// Create tags and items
const tags = await ItemTag.create(
["movies", "funny"].map(tagName =>({ tagName }))
);
const item = await Item.create({
"title": "Something",
"description": "An item",
tags
});
// Query with our static
const result = (await Item.lookup({
path: 'tags',
query: { 'tags.tagName' : { '$in': [ 'funny', 'politics' ] } }
})).pop();
log(result);
mongoose.disconnect();
} catch (e) {
console.error(e);
} finally {
process.exit()
}
})()
And from MongoDB 3.6 and upward, even without the $unwind and $group building:
const { Schema, Types: { ObjectId } } = mongoose = require('mongoose');
const uri = 'mongodb://localhost/looktest';
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const itemTagSchema = new Schema({
tagName: String
});
const itemSchema = new Schema({
title: String,
description: String,
tags: [{ type: Schema.Types.ObjectId, ref: 'ItemTag' }]
},{ timestamps: true });
itemSchema.statics.lookup = function({ path, query }) {
let rel =
mongoose.model(this.schema.path(path).caster.options.ref);
// MongoDB 3.6 and up $lookup with sub-pipeline
let pipeline = [
{ "$lookup": {
"from": rel.collection.name,
"as": path,
"let": { [path]: `$${path}` },
"pipeline": [
{ "$match": {
...query,
"$expr": { "$in": [ "$_id", `$$${path}` ] }
}}
]
}}
];
return this.aggregate(pipeline).exec().then(r => r.map(m =>
this({ ...m, [path]: m[path].map(r => rel(r)) })
));
};
const Item = mongoose.model('Item', itemSchema);
const ItemTag = mongoose.model('ItemTag', itemTagSchema);
const log = body => console.log(JSON.stringify(body, undefined, 2));
(async function() {
try {
const conn = await mongoose.connect(uri);
// Clean data
await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));
// Create tags and items
const tags = await ItemTag.insertMany(
["movies", "funny"].map(tagName => ({ tagName }))
);
const item = await Item.create({
"title": "Something",
"description": "An item",
tags
});
// Query with our static
let result = (await Item.lookup({
path: 'tags',
query: { 'tagName': { '$in': [ 'funny', 'politics' ] } }
})).pop();
log(result);
await mongoose.disconnect();
} catch(e) {
console.error(e)
} finally {
process.exit()
}
})()
What you are asking for isn't directly supported, but it can be achieved by adding another filter step after the query returns.
First, .populate( 'tags', null, { tagName: { $in: ['funny', 'politics'] } } ) is definitely what you need to do to filter the tags documents. Then, after the query returns, you'll need to manually filter out documents that don't have any tags docs that matched the populate criteria. Something like:
query....
.exec(function(err, docs){
docs = docs.filter(function(doc){
return doc.tags.length;
})
// do stuff with docs
});
Try replacing
.populate('tags').where('tags.tagName').in(['funny', 'politics'])
by
.populate( 'tags', null, { tagName: { $in: ['funny', 'politics'] } } )
Update: Please take a look at the comments - this answer does not correctly match the question, but maybe it answers other questions from users who come across it (I think that because of the upvotes), so I will not delete this "answer":
First: I know this question is really outdated, but I searched for exactly this problem and this SO post was the Google entry #1. So I implemented the docs.filter version (accepted answer) but as I read in the mongoose v4.6.0 docs we can now simply use:
Item.find({}).populate({
path: 'tags',
match: { tagName: { $in: ['funny', 'politics'] }}
}).exec((err, items) => {
console.log(items[0].tags)
// contains only tags where tagName is 'funny' or 'politics'
})
Hope this helps future search machine users.
After having the same problem myself recently, I've come up with the following solution:
First, find all ItemTags where tagName is either 'funny' or 'politics' and return an array of ItemTag _ids.
Then, find Items which contain all ItemTag _ids in the tags array
ItemTag
.find({ tagName : { $in : ['funny','politics'] } })
.lean()
.distinct('_id')
.exec((err, itemTagIds) => {
if (err) { console.error(err); }
Item.find({ tags: { $all: itemTagIds } }, (err, items) => {
console.log(items); // Items filtered by tagName
});
});
#aaronheckmann 's answer worked for me, but I had to replace return doc.tags.length; with return doc.tags != null; because that field contains null if it doesn't match the conditions written inside populate.
So the final code:
query....
.exec(function(err, docs){
docs = docs.filter(function(doc){
return doc.tags != null;
})
// do stuff with docs
});


Merge collections to get average rating, but still get all of original collection before a rating has been given. Mongoose / nodejs

I have an admin that creates an item (a bourbon) that users can comment on and rate within that comment. I was able to aggregate the comments, but was not able to display newly-created bourbons, only those already seeded that already had ratings. I've tried to implement something similar to this thread: referenced thread, but I'm not doing something correctly.
I'm a noob, and mainly play in the frontend, and am very confused about how I should change this sample code into actual production code. I see what each function is doing, but I'm still fuzzy on it.
Should I do an async.each and set the aggregate function as the iterator..? I know this is busted. I've tried a few things now. Keep getting a 500 error, nothing in the console.log. Any help for this noob is much appreciated.
Here are my schemas:
bourbon:
'use strict';
var mongoose = require('mongoose'),
BourbonSchema = null;
module.exports = mongoose.model('Bourbon', {
name: {type: String, required: true},
blog: {type: String, required: true},
photo: {type: String, required: true, default:'http://aries-wineny.com/wp-content/uploads/2014/09/woodford-reserve.jpg'},
location: {type: String, required: true},
distillery: {type: String, required: true},
avgRating: {type: Number}
});
var Bourbon = mongoose.model('Bourbon', BourbonSchema);
module.exports = Bourbon;
comments:
'use strict';
var mongoose = require('mongoose');
module.exports = mongoose.model('Comment', {
bourbonId: {type: mongoose.Schema.ObjectId, ref: 'Bourbon'},
userId: {type: mongoose.Schema.ObjectId, ref: 'User'},
text: {type: String, required: true},
createdAt: {type: Date, required: true, default: Date.now},
rating : {type: Number, required: true},
votes: {type: Number, default: 0}
});
This is my find / get in the controller, which I tried piecing together from the referenced link, but it's sloppy now:
'use strict';
var Bourbon = require('../../../models/bourbon'),
Comment = require('../../../models/comment'),
DataStore = require('nedb'),
db = new DataStore(),
async = require('async');
module.exports = {
description: 'Get Bourbons',
notes: 'Get Bourbons',
tags: ['bourbons'],
handler: function(request, reply){
async.waterfall(
[
function(comment,callback){
async.series(
[
function(callback){
Bourbon.find({},function(err,results){
async.eachLimit(results,10,function(result,callback){
db.insert({
'bourbonId': result._id.toString(),
'location' : result.location,
'blog' : result.blog,
'distillery': result.distillery,
'name': result.name,
'avgRating': 0
},callback);
},callback);
});
},
function(callback){
Comment.aggregate(
[
{'$group':{
'_id': '$bourbonId',
'avgRating':{
'$avg':'$rating'
}
}}
],
function(err,results){
async.eachLimit(results,10,function(result,callback){
db.update(
{'bourbonId': result._id.toString()},
{'$set':{
'avgRating': result.avgRating
}
},
callback
);
},callback);
}
);
}
],
function(err) {
if (err) callback(err);
db.find({},{'_id': 0},callback);
}
);
}
],
function(err,results){
reply({results: results});
console.log('LOOOOOOOOOOOOOOOOK',JSON.stringify(results, undefined, 4));
process.exit();
});
}
};
Seems you have more to learn about callbacks and sequencing. This is all the code inside your request handler that you need. Of course, change it to send the response once you see what is happening.
async.series(
[
// List out exiting collection with 0 average
function(callback) {
Bourbon.find({},function(err,results){
async.eachLimit(results,10,function(result,callback){
var plain = result.toObject()
plain.bourbonId = plain._id.toString();
plain.avgRating = 0;
delete plain._id;
db.insert(plain,callback); // next each iteration
},callback); // move to next in series
});
},
// Get aggregate results and update the items you just wrote
function(callback) {
Comment.aggregate(
[
{ '$group': {
'_id': '$bourbonId',
'avgRating':{ '$avg':'$rating' }
}}
],
function(err,results) {
async.eachLimit(results,10,function(result,callback){
db.update(
{ 'bourbonId': result._id.toString() },
{'$set': {'avgRating': result.avgRating } },
callback // next each iteration
);
},callback); // next in series "last"
}
);
}
],
// This is called when both in the series are complete
function(err) {
if (err) callback(err);
db.find({},{'_id': 0},function(err,docs) {
console.log( docs );
});
}
);
The aim here is:
Put 0 values in a hash table ( here using nedb ) for all items
Get aggregation results from the other collection
Update the copy of all items with those items that actually have a value
When all is done then you read back your hash table
Full working example:
var async = require('async'),
mongoose = require('mongoose'),
DataStore = require('nedb'),
db = new DataStore(),
Schema = mongoose.Schema;
var userSchema = new Schema({
"name": String
});
var ratingSchema = new Schema({
"bourbonId": { "type": Schema.Types.ObjectId, "ref": "Bourbon" },
"userId": { "type": Schema.Types.ObjectId, "ref": "User" },
"rating": { "type": Number, "required": true }
});
var bourbonSchema = new Schema({
"name": { "type": String, "required": true },
"blog": { "type": String, "required": true },
"photo": { "type": String, "required": true },
"ratings": [{ "type": Schema.Types.ObjectId, "ref": "Rating" }],
"rating": { "type": Number }
});
var User = mongoose.model( "User", userSchema ),
Rating = mongoose.model( "Rating", ratingSchema ),
Bourbon = mongoose.model( "Bourbon", bourbonSchema );
mongoose.connect("mongodb://localhost/bourbon");
async.waterfall(
[
function(callback) {
async.each([User,Rating,Bourbon],function(model,callback) {
model.remove({},callback);
},
function(err) {
callback(err);
});
},
function(callback) {
Bourbon.create({
"name": 'test',
"blog": 'test',
"photo": 'test'
},callback);
},
function(bourbon,callback) {
Bourbon.create({
"name": 'another',
"blog": 'another',
"photo": 'another'
},callback);
},
function(bourbon,callback) {
User.create({ "name": 'ted' },function(err,user) {
if (err) callback(err);
Rating.create({
"bourbonId": bourbon,
"userId": user,
"rating": 5
},function(err,rating1) {
callback(err,user,bourbon,rating1)
});
});
},
function(user,bourbon,rating1,callback) {
Rating.create({
"bourbonId": bourbon,
"userId": user,
"rating": 7
},function(err,rating2) {
callback(err,bourbon,rating1,rating2);
});
},
function(bourbon,rating1,rating2,callback) {
Bourbon.findById(bourbon.id,function(err,bourbon) {
bourbon.ratings.push(rating1,rating2);
bourbon.save(function(err,bourbon) {
callback(err)
});
});
},
function(callback) {
async.series(
[
function(callback) {
Bourbon.find({},function(err,results) {
if (err) callback(err);
async.eachLimit(results,10,function(result,callback) {
var plain = result.toObject();
plain.bourbonId = plain._id.toString();
plain.avgRating = 0;
delete plain._id;
db.insert(plain,callback);
},callback);
});
},
function(callback) {
Rating.aggregate(
[
{ "$group": {
"_id": "$bourbonId",
"avgRating": { "$avg": "$rating" }
}}
],
function(err,results) {
if (err) callback(err);
async.eachLimit(results,10,function(result,callback) {
db.update(
{ "bourbonId": result._id.toString() },
{ "$set": { "avgRating": result.avgRating } },
callback
);
},callback);
}
);
}
],
function(err) {
if (err) callback(err);
db.find({},{ '_id': 0 },callback);
}
);
}
],
function(err,results) {
if (err) throw err;
console.log( results );
mongoose.disconnect();
}
);
Returns results as expected:
[ { name: 'test',
blog: 'test',
photo: 'test',
__v: 0,
ratings: [],
bourbonId: '54c17bea8aa5f8c9161f5b6e',
avgRating: 0 },
{ name: 'another',
blog: 'another',
photo: 'another',
__v: 1,
ratings: [ [Object], [Object] ],
bourbonId: '54c17bea8aa5f8c9161f5b6f',
avgRating: 6 } ]
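For reference, on MongoDB 3.4 or later the same average could be computed in a single server-side aggregation with $lookup, avoiding the separate nedb hash table. A minimal sketch, assuming the Bourbon and Rating models defined above:
// Sketch only: join ratings per bourbon and average them on the server.
// $avg over an empty array yields null, so $ifNull maps "no ratings yet" to 0,
// matching the nedb-based output above. ($addFields requires MongoDB 3.4+.)
Bourbon.aggregate([
  { "$lookup": {
    "from": Rating.collection.name,
    "localField": "_id",
    "foreignField": "bourbonId",
    "as": "ratingDocs"
  }},
  { "$addFields": {
    "avgRating": { "$ifNull": [ { "$avg": "$ratingDocs.rating" }, 0 ] }
  }},
  { "$project": { "ratingDocs": 0 } }
], function(err, results) {
  // each bourbon now carries avgRating, 0 when it has no ratings yet
});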
