Get some elements from an array in MongoDB - JavaScript

In MongoDB shell version v4.4.6
the following code works perfectly.
db['pri-msgs'].findOne({tag:'aaa&%qqq'},{msgs:{$slice:-2}})
But in the Node.js MongoDB driver, the following code doesn't work.
// NOTE(review): this fails in the Node.js driver for two reasons:
//   1) the operator is misspelled — it must be `$slice`, not `slice`;
//   2) the driver takes field selections under a `projection` key in the
//      options object, e.g. { projection: { msgs: { $slice: -2 } } },
//      unlike the shell, where the projection is the second argument itself.
db.collection('pri-msgs').findOne({
tag: 'aaa&%qqq'
}, {
msgs: {
slice: -2 // should be `$slice`, wrapped in `projection: { ... }`
}
})
My document-->
{"_id":{"$oid":"60c4730fadf6891850db90f9"},"tag":"aaa&%qqq","msgs":[{"msg":"abc","sender":0,"mID":"ctYAR5FDa","time":1},{"msg":"bcd","sender":0,"mID":"gCjgPf85z","time":2},{"msg":"def","sender":0,"mID":"lAhc4yLr6","time":3},{"msg":"efg","sender":0,"mID":"XcBLC2rGf","time":4,"edited":true},{"msg":"fgh","sender":0,"mID":"9RWVcEOlD","time":5},{"msg":"hij","sender":0,"mID":"TJXVTuWrR","time":6},{"msg":"jkl","sender":0,"mID":"HxUuzwrYN","time":7},{"msg":"klm","sender":0,"mID":"jXEOhARC2","time":8},{"msg":"mno","sender":0,"mID":"B8sVt4kCy","time":9}]}
Actually what I'm trying to do is get the last 2 items from the msgs array where time is greater than 'n'. Here 'n' is a number.

You can use aggregation-pipeline to get the results you are looking for. The steps are the following.
Match the documents you want by tag.
Unwind the msgs array.
Sort descending by msgs.time.
Limit first 2 elements.
Match the time you are looking for using a range query.
Group the documents back by _id.
Your query should look something like this:
// Pipeline: select the document by tag, flatten the msgs array, sort
// newest-first, keep the 2 most recent entries, drop those at or before
// time n, then rebuild a single document per _id.
db['pri-msgs'].aggregate([
{ $match: { tag: 'aaa&%qqq' } }, // 1. match the document by tag
{ $unwind: '$msgs' }, // 2. one pipeline document per msgs element
{
$sort: {
'msgs.time': -1 //DESC
}
},
{ $limit: 2 }, // 4. keep the 2 most recent messages
{
$match: {
'msgs.time': {
$gt: 2 //n
}
}
},
{
$group: {
_id: '$_id', // 6. regroup the unwound elements by document id
tag: { $first: '$tag' }, // tag is constant within a group; take the first
msgs: {
$push: { msg: '$msgs.msg', sender: '$msgs.sender', mID: '$msgs.mID', time: '$msgs.time' }
}
}
}
]);

Related

MongoDB Aggregation - How to get/update sum

For example, I have something in my database like in customers collection.
{
Max: {
shoping_list: {
food: { Pizza: 2, Ramen: 1, Sushi: 5 }
}
},
John: {
shoping_list: {
food: { Pizza: 2, Ramen: 1, Burger: 1 }
}
}
}
In my backend, I want to get the sum of food
const request = await customers.aggregate([
{
$group: {
_id: null,
Pizza: {
$sum: '$shoping_list.food.Pizza',
},
Is there a way how to update or get the sum automatically without manually writing every food from the shopping_list?
The design of the document may make the query look complex, but it is still achievable.
$replaceRoot - Replace the input document with a new document.
1.1. $reduce - Iterate the array and transform it into a new form (array).
1.2. input - Transform key-value pair of current document $$ROOT to an array of objects such as: [{ k: "", v: "" }]
1.3. initialValue - Initialize the value with an empty array. And this will result in the output in the array.
1.4. in
1.4.1. $concatArrays - Combine aggregate array result ($$value) with 1.4.2.
1.4.2. With the $cond operator to filter out the document with { k: "_id" }, and we transform the current iterate object's v shoping_list.food to the array via $objectToArray.
$unwind - Deconstruct the foods array into multiple documents.
$group - Group by foods.k and perform sum for foods.v.
// Sums each food across every customer, regardless of the (dynamic) customer
// key names, by turning the document's key-value pairs into an array first.
db.collection.aggregate([
{
$replaceRoot: {
newRoot: {
// Build a single `foods` array of { k, v } pairs from all customers.
foods: {
$reduce: {
input: {
$objectToArray: "$$ROOT" // [{ k: "Max", v: {...} }, { k: "John", v: {...} }, ...]
},
initialValue: [],
in: {
$concatArrays: [
"$$value", // pairs accumulated so far
{
$cond: {
if: {
$ne: [
"$$this.k",
"_id" // skip the _id entry — it has no shoping_list
]
},
then: {
$objectToArray: "$$this.v.shoping_list.food" // [{ k: "Pizza", v: 2 }, ...]
},
else: []
}
}
]
}
}
}
}
}
},
{
$unwind: "$foods" // one document per food entry
},
{
$group: {
_id: "$foods.k", // group by food name
sum: {
$sum: "$foods.v" // total quantity across all customers
}
}
}
])
Demo # Mongo Playground

Aggregate match doesn't work once I add more than 1 match?

I'm having some trouble with this aggregate function. It works correctly when I only have a single match argument (created_at), however when I add a second one (release_date) it never returns any results, even though it should. I've also tried the matches with the '$and' parameter with no luck.
Here is the code. Anyone know what I'm doing wrong?
Thanks!
// Counts votes per title, restricted to both date conditions.
// FIX: the collection-name string was unterminated ('votes) — that is a
// SyntaxError, so the aggregation never runs at all.
// NOTE(review): also make sure created_at and release_date are stored as
// BSON dates; $gte against an ISODate never matches string-typed dates.
db.collection('votes').aggregate([
{
$match: {
$and:
[
{ created_at: { $gte: ISODate("2021-01-28T05:37:58.549Z") }},
{ release_date: { $gte: ISODate("2018-01-28T05:37:58.549Z") }}
]
}
},
{
$group: {
_id: '$title', // one bucket per title
countA: { $sum: 1 } // number of matching votes for that title
}
},
{
$sort: { countA: -1 } // most-voted titles first
}
])

Mongoose date $gte operator not working as expected

I am trying to write query for last week but it is not working as expected in mongoDB.
// NOTE(review): $match operates on whole documents, not array elements.
// If ANY element of groupReviews satisfies the $gte condition, the entire
// document (including older reviews) passes through — which is why results
// older than the cutoff still appear. Filter the array itself with $filter
// in $project, or $unwind + $match + $group, to drop the old entries.
[{
$lookup: {
from: 'reviews',
localField: 'groupReviews',
foreignField: '_id',
as: 'groupReviews' // replaces the id array with the joined review docs
}
}, {
$match: {
$and: [{
_id: {
$eq: ObjectId('5f247eea8ad8eb53883f4a9b')
}
},
{
// Matches documents with AT LEAST ONE review past the date —
// it does not remove the older reviews from the array.
"groupReviews.reviewCreated": {
$gte: ISODate('2020-06-20T10:24:51.303Z')
}
}
]
}
}, {
$project: {
count: {
$size: "$groupReviews",
},
groupReviews: {
$slice: ["$groupReviews", 0, 20], // first 20 elements, old ones included
}
}
}, {
$sort: {
"groupReviews.reviewCreated": -1
}
}]
The actual result: the above code returns results which are older than 2020-06-20.
The expected result: it should not return reviews older than 2020-06-20.
I am attaching an image for more reference.
Image Link
The $match stages matches entire documents, not individual array elements. If the array contains at least one element that satisfies the $gte condition, the document will be matched and passed along the pipeline.
If you want to remove the individual array elements that are older than the given date, you could either
$unwind the array before matching and $group to rebuild it with only the matching entries
Use $filter in your $project stage to eliminate the unwanted elements prior to slicing

get $max of string

I am trying to aggregate data on my mongodb and to get some statistics regarding a collection,
I have a field called country and I need to get the value that occurs most often
below attached is the current code,
consider my collection has 3 document:
{_id:1, country:"Italy"}
{_id:1, country:"Zimbabwe"}
{_id:1, country:"Italy"}
The expected output should be :Italy
But in reality I get Zimbabwe because it compares Z > I
// Aggregates visit statistics for a single link.
// NOTE(review): $max on a string field picks the lexicographically greatest
// value ('Zimbabwe' > 'Italy'), NOT the most frequent one. To get the most
// common value, group by that field and sort by count (e.g. $sortByCount)
// in a separate pipeline instead.
visitSchema.statics.calcTotalVisits = async function (link) {
const stats = await this.aggregate([
{
$match: { link }, // only visits for this link
},
{
$group: {
_id: '$link',
sumDocs: { $sum: 1 }, // total number of visit documents
topReferer: { $max: '$referer' }, // lexicographic max — not "most frequent"
topCountry: { $max: '$country' }, // lexicographic max — not "most frequent"
},
},
]);
console.log(stats); // result is only logged; the function returns undefined
};

Returning count w/ data in MongoDB Aggregation

I've written a MongoDB aggregation query that uses a number of stages. At the end, I'd like the query to return my data in the following format:
{
data: // Array of the matching documents here
count: // The total count of all the documents, including those that are skipped and limited.
}
I'm going to use the skip and limit features to eventually pare down the results. However, I'd like to know the count of the number of documents returned before I skip and limit them. Presumably, the pipeline stage would have to occur somewhere after the $match stage but before the $skip and $limit stages.
Here's the query I've currently written (it's in an express.js route, which is why I'm using so many variables):
// Query parameters driving the paged, filtered, sorted aggregation below.
// NOTE(review): req.query values arrive as strings — sortOrder, skip and
// rowsPerPage must be converted with Number() before use, since $skip,
// $limit and $sort require numeric arguments. TODO confirm the conversion
// happens before this point.
const {
minDate,
maxDate,
filter, // Text to search
filterTarget, // Row to search for text
sortBy, // Row to sort by
sortOrder, // 1 or -1
skip, // rowsPerPage * pageNumber
rowsPerPage, // Limit value
} = req.query;
db[source].aggregate([
{
$match: {
date: {
$gt: minDate, // Filter out by time frame...
$lt: maxDate
}
}
},
{
$match: {
[filterTarget]: searchTerm // Match search query....
}
},
{
$sort: {
[sortBy]: sortOrder // Sort by date...
}
},
{
$skip: skip // Skip the first X number of doucuments...
},
{
$limit: rowsPerPage
},
]);
Thanks for your help!
We can use facet to run parallel pipelines on the data and then merge the output of each pipeline.
The following is the updated query:
// Same filtered/sorted query as before, but $facet runs two sub-pipelines
// over the matched set: "data" applies skip/limit for the page, while
// "info" counts ALL matched documents before pagination.
db[source].aggregate([
{
$match: {
date: {
$gt: minDate, // Filter out by time frame...
$lt: maxDate
}
}
},
{
$match: {
[filterTarget]: searchTerm // Match search query....
}
},
{
$set: {
[filterTarget]: { $toLower: `$${filterTarget}` } // Necessary to ensure that sort works properly...
}
},
{
$sort: {
[sortBy]: sortOrder // Sort by date...
}
},
{
$facet:{
"data":[
{
$skip: skip
},
{
$limit:rowsPerPage
}
],
"info":[
{
$count:"count" // [{ count: N }] — empty array if nothing matched
}
]
}
},
{
$project:{
"_id":0,
"data":1,
// Unwrap info[0].count into a plain number for the response shape.
"count":{
$let:{
"vars":{
"elem":{
$arrayElemAt:["$info",0]
}
},
"in":{
$trunc:"$$elem.count"
}
}
}
}
}
]).pretty()
I think I figured it out. But if someone knows that this answer is slow, or at least faulty in some way, please let me know!
It's to add a $group stage, passing null as the value, then pushing each document, $$ROOT, into the data array, and for each one, incrementing count by 1 with the $sum operator.
Then, in the next $project stage, I simply remove the _id property, and slice down the array.
// Alternative to $facet: collapse everything into one document, pushing
// every matched document into `data` while counting, then page with $slice.
// NOTE(review): $push: "$$ROOT" accumulates the whole result set in a
// single group — presumably fine for small collections, but it can hit
// the 16MB document / $group memory limits on large ones; verify.
db[source].aggregate([
{
$match: {
date: {
$gt: minDate, // Filter out by time frame...
$lt: maxDate
}
}
},
{
$match: {
[filterTarget]: searchTerm // Match search query....
}
},
{
$set: {
[filterTarget]: { $toLower: `$${filterTarget}` } // Necessary to ensure that sort works properly...
}
},
{
$sort: {
[sortBy]: sortOrder // Sort by date...
}
},
{
$group: {
_id: null, // single group containing every matched document
data: { $push: "$$ROOT" }, // Push each document into the data array.
count: { $sum: 1 } // total before pagination
}
},
{
$project: {
_id: 0,
count: 1,
data: {
$slice: ["$data", skip, rowsPerPage] // paginate in-memory
},
}
}
]).pretty()

Categories

Resources