I am trying to modify my query to get the expected output. I can write the query, but the output is not in the shape I need for binding in the front end.
Actual output:
{
"_id" : null,
"first" : 3571.0,
"second" : 24.0
}
Expected output:
{ "_id" : null,
"operation" : "edit",
"count" : 3571.0
}
{ "_id" : null,
"operation" : "read",
"count" : 24
}
{ "_id" : null,
"operation" : "update",
"count" : 9000
}
My query:
db.getCollection('blog').aggregate([
{ "$group": {
"_id": null,
"first": {
"$sum": {
"$cond": [{ "$in": ["$Operation", ["edit1", "edit2"]] }, 1, 0]
}
},
"second": {
"$sum": {
"$cond": [{ "$in": ["$Operation", ["read1", "read2"]] }, 1, 0]
}
}
},
},
])
If you have a collection like the one below:
[
{
"_id" : 1,
"operation" : "edit1" # some extra fields
},
{
"_id" : 2,
"operation" : "read1"
},
{
"_id" : 3,
"operation" : "update1"
}
]
By using $project and $cond you can map "read1", "read2" to read, the updates to update, and the edits to edit; then, by grouping on the new operation field, you get the count of each operation.
You can use this query:
db.getCollection('blog').aggregate([
  {
    "$project": {
      "new_operation": {
        "$cond": [
          { "$in": ["$operation", ["edit1", "edit2"]] },
          "edit",
          {
            "$cond": [
              { "$in": ["$operation", ["read1", "read2"]] },
              "read",
              "update"
            ]
          }
        ]
      }
    }
  },
  {
    "$group": {
      "_id": "$new_operation",
      "count": {"$sum": 1}
    }
  }
])
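With the three sample documents above, this pipeline would return one document per operation (in no particular order), for example:
{ "_id" : "edit", "count" : 1 }
{ "_id" : "read", "count" : 1 }
{ "_id" : "update", "count" : 1 }
If you want a separate "operation" field as in your expected output, a trailing stage such as { "$project": { "operation": "$_id", "count": 1 } } can add it.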
I don't know if this is possible, but I need some help with Mongo. I have the following document, and I want to use something like $addToSet to add a value to one of the items in votes, while removing that value from all the other items in votes, but I have no idea how.
{
_id: '872952643117518909',
questions: [
{ question: 'a', number: 1, dropDownInfo: [Object] },
{ question: 'b', number: 2, dropDownInfo: [Object] },
{ question: 'c', number: 3, dropDownInfo: [Object] }
],
votes: {
'1': [ '619284841187246090', '662697094104219678' ],
'2': [ '619284841187246090', '662697094104219678' ],
'3': [ '662697094104219678', '619284841187246090' ]
},
question: 'abc',
timestamp: 1628198528903,
finished: false,
channel: '812038854302892064'
}
The below pipeline adds a vote ('619284841187246090') to a specific field (here "2" was chosen arbitrarily) and removes that vote from the "1" and "3" arrays.
The solution is general; it can work with any vote fields, not just "1", "2", "3".
You can use this pipeline in an aggregation, or in an update with a pipeline (MongoDB >= 4.2).
$addToSet doesn't work inside arrays like this; it works when grouping, and in some other places in MongoDB 5.
I think your schema has a problem: you are saving data (the vote keys) as field names, which makes querying and indexing harder.
But we can still do it by converting the object to an array and back to an object.
I think it's best to keep data in arrays and keep the field names to a known schema.
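If you can restructure the schema so that votes is an array of subdocuments, a plain update is enough. A rough sketch, using an illustrative voters field per question key (these names are assumptions, not from the original document):
// Hypothetical restructured shape:
// { votes: [ { question: "1", voters: [ ... ] }, { question: "2", voters: [ ... ] } ] }
var voter = "619284841187246090";
// Add the vote to question "2"
db.collection.updateOne(
    { _id: "872952643117518909", "votes.question": "2" },
    { $addToSet: { "votes.$.voters": voter } }
);
// Remove the same vote from every other question (MongoDB >= 3.6 for arrayFilters)
db.collection.updateOne(
    { _id: "872952643117518909" },
    { $pull: { "votes.$[other].voters": voter } },
    { arrayFilters: [ { "other.question": { $ne: "2" } } ] }
);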
With the current schema, you can run the query below.
Query
db.collection.aggregate( [ {
"$addFields" : {
"votes" : {
"$arrayToObject" : {
"$map" : {
"input" : {
"$map" : {
"input" : {
"$objectToArray" : "$votes"
},
"as" : "m",
"in" : [ "$$m.k", "$$m.v" ]
}
},
"as" : "vote",
"in" : {
"$cond" : [ {
"$eq" : [ {
"$arrayElemAt" : [ "$$vote", 0 ]
}, "2" ]
}, [ {
"$arrayElemAt" : [ "$$vote", 0 ]
}, {
"$cond" : [ {
"$in" : [ "619284841187246090", {
"$arrayElemAt" : [ "$$vote", 1 ]
} ]
}, {
"$arrayElemAt" : [ "$$vote", 1 ]
}, {
"$concatArrays" : [ {
"$arrayElemAt" : [ "$$vote", 1 ]
}, [ "619284841187246090" ] ]
} ]
} ], [ {
"$arrayElemAt" : [ "$$vote", 0 ]
}, {
"$filter" : {
"input" : {
"$arrayElemAt" : [ "$$vote", 1 ]
},
"as" : "v",
"cond" : {
"$not" : [ {
"$eq" : [ "$$v", "619284841187246090" ]
} ]
}
}
} ] ]
}
}
}
}
}
} ])
Results
[
{
"_id": "872952643117518909",
"channel": "812038854302892064",
"finished": false,
"question": "abc",
"questions": [
{
"dropDownInfo": "",
"number": 1,
"question": "a"
},
{
"dropDownInfo": "",
"number": 2,
"question": "b"
},
{
"dropDownInfo": "",
"number": 3,
"question": "c"
}
],
"timestamp": 1.628198528903e+12,
"votes": {
"1": [
"662697094104219678"
],
"2": [
"619284841187246090",
"662697094104219678"
],
"3": [
"662697094104219678"
]
}
}
]
My document in Cosmos DB looks like this:
{
"todayDate": "2017-12-08",
"data": [
{
"group": {"priority": 1, "total": 10},
"severity": 1
},
{
"group": {"priority": 2, "total": 13},
"priority": 2
}
]
}
The following query, when issued either from the mongo shell for Cosmos DB in the Azure portal or from my Spring Data MongoDB project, works fine and returns results in no time:
db.myCollection.find({ "$or" : [ { "data" : { "$elemMatch" : { "priority" : 1}} , "$or" : [ { "data" : { "$elemMatch" : { "group.priority" : 1}}}] }]})
However, the following query along the same lines, with more OR conditions (it is basically two of the above queries combined with an OR operator), hangs indefinitely:
db.myCollection.find({ "$or": [ { "data" : { "$elemMatch" : { "priority" : 1}} , "$or" : [ { "data" : { "$elemMatch" : { "group.priority" : 1}}}] }, { "data" : { "$elemMatch" : { "severity" : 2}} , "$or" : [ { "data" : { "$elemMatch" : { "group.severity" : 2}}}] } ] })
Is there anything wrong with the last query that makes it hang indefinitely? Even if I replace the initial OR with AND, the result is the same: it hangs indefinitely.
I created 3 documents in my Cosmos DB according to the document template you provided.
[
{
"id": "1",
"todayDate": "2017-12-08",
"data": [
{
"group": {
"severity": 1,
"total": 10
},
"severity": 1
},
{
"group": {
"priority": 1,
"total": 13
},
"priority": 1
}
]
},
{
"id": "2",
"todayDate": "2017-12-09",
"data": [
{
"group": {
"priority": 3,
"total": 10
},
"severity": 1
},
{
"group": {
"priority": 3,
"total": 13
},
"priority": 1
}
]
},
{
"id": "3",
"todayDate": "2017-12-10",
"data": [
{
"group": {
"priority": 1,
"total": 10
},
"severity": 1
},
{
"group": {
"priority": 2,
"total": 13
},
"priority": 2
}
]
}
]
Then I used the Robo 3T tool to execute your query.
db.coll.find({
"$or": [
{ "data" : { "$elemMatch" : { "priority" : 1}} ,
"$or" : [
{ "data" : { "$elemMatch" : { "group.priority" : 1}}}
] },
{ "data" : { "$elemMatch" : { "severity" : 2}} ,
"$or" : [
{ "data" : { "$elemMatch" : { "group.severity" : 2}}}
] }
]
})
result:
The syntax of $or that I found in the official documentation is:
{ $or: [ { <expression1> }, { <expression2> }, ... , { <expressionN> } ] }
It seems that your query can be executed normally even though it is different from the above syntax. In my experience, $or is generally nested with $and (see MongoDB Nested OR/AND Where?), so I do not quite understand the purpose of your nested $or here.
Surely, an indefinite hang is probably because the data is too large, so the query runs too long and you need to optimize it.
Hope it helps you. Any concerns, please let me know.
Updated answer:
I have modified my 3 sample documents accordingly and then queried the 2 eligible documents via the query you provided.
Query:
db.coll.find(
{
"$and": [
{
"$or": [
{
"data": {
"$elemMatch": {
"priority": 2
}
}
},
{
"data": {
"$elemMatch": {
"group.priority": 2
}
}
}
]
},
{
"$or": [
{
"data": {
"$elemMatch": {
"severity": 1
}
}
},
{
"data": {
"$elemMatch": {
"group.severity": 1
}
}
}
]
}
]
}
)
Results:
So, I think your query is correct. Is the data in the database very large? If it hangs for a long time, do you see timeout error messages? You could also check the RUs (request units) setting.
I have the following document in my collection.
{
"_id" : ObjectId("55961a28bffebcb8058b4570"),
"title" : "BackOffice 2",
"cts" : NumberLong(1435900456),
"todo_items" : [
{
"id" : "55961a42bffebcb7058b4570",
"task_desc" : "test 1",
"completed_by" : "557fccb5bffebcf7048b457c",
"completed_date" : NumberLong(1436161096)
},
{
"id" : "559639afbffebcc7098b45a6",
"task_desc" : "test 2",
"completed_by" : "557fccb5bffebcf7048b457c",
"completed_date" : NumberLong(1435911809)
},
{
"id" : "559a22f5bffebcb0048b476c",
"task_desc" : "test 3",
}
],
"uts" : NumberLong(1436164853)
}
I need an aggregation query that does the following: if the fields "completed_by" and "completed_date" exist and have non-null values, push the item into a "completed" array field; otherwise push it into an "incomplete" field.
Following is a sample of the result I want.
{
"_id" : ObjectId("55961a28bffebcb8058b4570"),
"completed" : [
{
"id":"557fccb5bffebcf7048b457c",
"title":"test 1",
"completed_by" : "557fccb5bffebcf7048b457c",
"completed_date" : NumberLong(1436161096)
},
{
"id":"557fccb5bffebcf7048b457c",
"title":"test 2",
"completed_by" : "557fccb5bffebcf7048b457c",
"completed_date" : NumberLong(1435911809)
}
],
"incomplete":[
{
"id" : "559a22f5bffebcb0048b476c",
"title" : "test 3"
}
]
}
As long as your "array" items have "distinct" identifiers (which they have), there are a couple of approaches to this.
Firstly, without actually "aggregating across documents":
db.collection.aggregate([
{ "$project": {
"title": 1,
"cts": 1,
"completed": { "$setDifference": [
{ "$map": {
"input": "$todo_items",
"as": "i",
"in": {
"$cond": [
"$$i.completed_date",
"$$i",
false
]
}
}},
[false]
]},
"incomplete": { "$setDifference": [
{ "$map": {
"input": "$todo_items",
"as": "i",
"in": {
"$cond": [
"$$i.completed_date",
false,
"$$i"
]
}
}},
[false]
]}
}}
])
That requires that you at least have MongoDB 2.6 available on the server in order to use the required $map and $setDifference operators. It's pretty fast considering that all the work is done in a single $project stage.
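As an aside, on MongoDB 3.2 or later the $filter operator can express the same single-stage idea more directly; a rough sketch along the same lines:
db.collection.aggregate([
    { "$project": {
        "title": 1,
        "cts": 1,
        "completed": {
            "$filter": {
                "input": "$todo_items",
                "as": "i",
                "cond": { "$gt": [ "$$i.completed_date", null ] }
            }
        },
        "incomplete": {
            "$filter": {
                "input": "$todo_items",
                "as": "i",
                "cond": { "$not": [ { "$gt": [ "$$i.completed_date", null ] } ] }
            }
        }
    }}
])
Here { "$gt": [ <field>, null ] } is true only when the field is present and not null.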
The alternative, which you should only use when "aggregating across documents", is available to all versions supporting the aggregation framework post MongoDB 2.2:
db.collection.aggregate([
{ "$unwind": "$todo_items" },
{ "$group": {
"_id": "$_id",
"title": { "$first": "$title" },
"cts": { "$first": "$cts" },
"completed": {
"$addToSet": {
"$cond": [
"$todo_items.completed_date",
"$todo_items",
null
]
}
},
"incomplete": {
"$addToSet": {
"$cond": [
"$todo_items.completed_date",
null,
"$todo_items",
]
}
}
}},
{ "$unwind": "$completed" },
{ "$match": { "completed": { "$ne": null } } },
{ "$group": {
"_id": "$_id",
"title": { "$first": "$title" },
"cts": { "$first": "$cts" },
"completed": { "$push": "$completed" },
"incomplete": { "$first": "$incomplete" }
}},
{ "$unwind": "$incomplete" },
{ "$match": { "incomplete": { "$ne": null } } },
{ "$group": {
"_id": "$_id",
"title": { "$first": "$title" },
"cts": { "$first": "$cts" },
"completed": { "$first": "$completed" },
"incomplete": { "$push": "$incomplete" }
}}
])
That isn't entirely complete, since you still need to cater for conditions where an array may end up empty. But that is not the real lesson here, since MongoDB 2.6 has already been in circulation for a couple of years.
In aggregation, you cannot really exclude the "null/false" results, but you can "filter" them.
Also, unless you are actually "aggregating across documents" as mentioned already, the second form with $unwind to process the arrays comes with a "lot" of overhead. So you really should consider altering the array contents in your client code as each document is read.
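For example, a rough sketch of doing that split in client code as each document is read:
db.collection.find().forEach(function (doc) {
    var completed = [], incomplete = [];
    doc.todo_items.forEach(function (item) {
        // treat an item as completed only when both fields are present and non-null
        if (item.completed_by != null && item.completed_date != null) {
            completed.push(item);
        } else {
            incomplete.push(item);
        }
    });
    // use doc._id, completed and incomplete here
});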
Can you please check the below? It pushes a null placeholder from a $cond inside the $group stage and then strips the nulls out with $setDifference in a $project stage:
db.collection.aggregate([
    { "$unwind": "$todo_items" },
    { "$group": {
        "_id": "$_id",
        "completed": { "$addToSet": { "$cond": [
            { "$and": [ { "$gt": [ "$todo_items.completed_by", null ] },
                        { "$gt": [ "$todo_items.completed_date", null ] } ] },
            "$todo_items", null ] } },
        "incompleted": { "$addToSet": { "$cond": [
            { "$and": [ { "$gt": [ "$todo_items.completed_by", null ] },
                        { "$gt": [ "$todo_items.completed_date", null ] } ] },
            null, "$todo_items" ] } }
    }},
    { "$project": {
        "completed": { "$setDifference": [ "$completed", [null] ] },
        "incompleted": { "$setDifference": [ "$incompleted", [null] ] }
    }}
]);
My documents look like this:
{
"_id" : "53ce85eda2579da8b40c1f0f",
"name" : "Autokino",
"tags" : [
"forMen"
],
"ratings" : [
{ "rating" : 5, "uuid" : "..."},
{ "rating" : 4, "uuid" : "..."},
{ "rating" : 4, "uuid" : "..."},
{ "rating" : 1, "uuid" : "..."},
]
}
Now I need the average of ratings.rating (here it should be 3.5). My query looks like this:
activities.aggregate([
{ $match: { _id: ObjectID(req.params.id) } },
{ $unwind: '$ratings' },
{ $group: {
_id: '$_id',
rating: { $avg: '$ratings.rating'},
}},
]);
It works, but what I get is:
{
"_id" : "53ce85eda2579da8b40c1f0f",
"rating" : 3.5
}
and this is what I need to get:
{
"_id" : "53ce85eda2579da8b40c1f0f",
"name" : "Autokino",
"tags" : [
"forMen"
],
"rating" : 3.5
}
(The original document without ratings array but with rating average)
How can I solve this problem?
Pipeline stages like $group and $project are "absolute" in that only the declared fields are emitted. You need another operator here. $first will do:
activities.aggregate([
{ "$match": { "_id": ObjectID(req.params.id) } },
{ "$unwind": "$ratings" },
{ "$group": {
"_id": "$_id",
"name": { "$first": "$name" },
"tags": { "$first": "$tags" },
"rating": { "$avg": "$ratings.rating" },
}},
]);
Since $unwind de-normalizes the array contents into many documents, you can use $first here to just take the "first" occurrence of each additional field that you are not otherwise aggregating.
If you are worried about having lots of fields to declare this way, MongoDB 2.6 does offer the $$ROOT variable. Its usage and output are likely not what you really want:
activities.aggregate([
{ "$match": { "_id": ObjectID(req.params.id) } },
{ "$project": {
"_id": "$$ROOT",
"ratings": 1
}},
{ "$unwind": "$ratings" },
{ "$group": {
"_id": "$_id",
"rating": { "$avg": "$ratings.rating" },
}},
]);
This gives you something like:
{
"_id" : {
"_id": "53ce85eda2579da8b40c1f0f",
"name" : "Autokino",
"tags" : [
"forMen"
],
"ratings" : [
{ "rating" : 5, "uuid" : "..."},
{ "rating" : 4, "uuid" : "..."},
{ "rating" : 4, "uuid" : "..."},
{ "rating" : 1, "uuid" : "..."},
]
},
"rating": 3.5
}
This is okay here, since grouping by _id is the same as grouping on the whole document, so you can always add a final $project to return to a similar state. But there are no wildcards here.
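For example, a sketch of such a final $project stage, flattening the grouped _id back out using the field names from the output above:
    { "$project": {
        "_id": "$_id._id",
        "name": "$_id.name",
        "tags": "$_id.tags",
        "rating": 1
    }}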
I just went through this whole song and dance as well and ended up having to re-add all my fields back. Not ideal!
So I just found this, which is much easier. I have a reviews array field, where each review has a rating property of 1 - 5:
{
$addFields: { avg: { $avg: '$reviews.rating'}}
},
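Applied to the original question, a minimal sketch of the same idea (assuming MongoDB 3.4 or later for $addFields, and the ratings field from the question):
activities.aggregate([
    { $match: { _id: ObjectID(req.params.id) } },
    { $addFields: { rating: { $avg: '$ratings.rating' } } },
    { $project: { ratings: 0 } } // drop the raw array, keep everything else
]);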
Is this the correct query for finding all docs that user1 received where archived = true for user1?
var query = {
"to.username": user1,
"to.section.archive": true
};
Models.Message.find( query ).sort([['to.updated','descending']]).exec(function (err, messages) {
A sample embedded 'to' array of the messages schema looks like this:
"to" : [
{
"user" : ObjectId("53b96c735f4a3902008aa019"),
"username" : "user1",
"updated" : ISODate("2014-07-08T06:23:43.000Z"),
"_id" : ObjectId("53bb8e6f1e2e72fd04009dad"),
"section" : {
"in" : true,
"out" : false,
"archive" : true
}
}
]
The query should only return the doc above (user1, and archive is true), not this next doc (archive is true, but not user1):
"to" : [
{
"user" : ObjectId("53b96c735f4a3902008aa019"),
"username" : "user2",
"updated" : ISODate("2014-07-08T06:24:42.000Z"),
"_id" : ObjectId("53bb8e6f1e2e72fd04009dad"),
"section" : {
"in" : true,
"out" : false,
"archive" : true
}
}
]
You want the $elemMatch operator to select the element that has both conditions and the positional $ operator for projection:
Models.Message.find(
{
"to": {
"$elemMatch": {
"username": "user2",
"section.archive": true
}
}
},
{ "created": 1, "message": 1, "to.$": 1 }
).sort([['to.updated','descending']]).exec(function (err, messages) {
});
Please note that this only works for matching the "first" element for projection. Also, you would want to "sort" on the value of the matching array element, and you cannot do that with .find() and the .sort() modifier.
If you want more than one match in the array then you need to use the aggregate method. This does more complex "filtering" and "projection" than is possible otherwise:
Models.Message.aggregate([
// Match documents
{ "$match": {
"to": {
"$elemMatch": {
"username": "user2",
"section.archive": true
}
}
}},
// Unwind to de-normalize
{ "$unwind": "$to" },
// Match the array elements
{ "$match": {
"to.username": "user2",
"to.section.archive": true
}},
// Group back to the original document
{ "$group": {
"_id": "$_id",
"created": { "$first": "$created" },
"message": { "$first": "$message" },
"to": { "$push": "$to" }
}},
// Sort the results "correctly"
{ "$sort": { "to.updated": -1 } }
],function(err,messages) {
});
Or you can avoid using $unwind and $group by applying some logic with the $map operator in MongoDB 2.6 or greater. Just watch that your array contents are "truly" unique, as $setDifference is applied to the resulting "filtered" array:
Models.Message.aggregate([
{ "$match": {
"to": {
"$elemMatch": {
"username": "user2",
"section.archive": true
}
}
}},
{ "$project": {
"created": 1,
"message": 1,
"_id": 1,
"to": {
"$setDifference": [
{
"$map": {
"input": "$to",
"as": "el",
"in": {
"$cond": [
{
"$and": [
{ "$eq": [ "$$el.username", "user2" ] },
"$$el.section.archive"
]
},
"$$el",
false
]
}
}
},
[false]
]
}
}},
{ "$sort": { "to.updated": -1 } }
],function(err,messages) {
});
Or even using $redact:
Models.Messages.aggregate([
{ "$match": {
"to": {
"$elemMatch": {
"username": "user2",
"section.archive": true
}
}
}},
{ "$redact": {
"$cond": {
"if": {
"$and": [
{ "$eq": [
{ "$ifNull": [ "$username", "user2" ] },
"user2"
] },
{ "$ifNull": [ "$section.archive", true ] }
]
},
"then": "$$DESCEND",
"else": "$$PRUNE"
}
}},
{ "$sort": { "to.updated": -1 } }
],function(err,messages) {
});
But be careful as $redact operates over all levels of the document, so your result might be unexpected.
Likely your "to" array actually only has single entries that will match, though, so generally the standard projection should be fine. But this is how you do "multiple" matches within an array with MongoDB.