My question is:
How can I query nested arrays?
I want to change the value of the key "likeUp", which is nested inside an object in the array "usersWhoLiked", where "usersWhoLiked" is itself nested in the array "comments".
How can I do that with Mongoose?
The request I wrote below does not work, but it is very similar to the answer given in the Stack Overflow post: Mongoose update update nested object inside an array
This is my request to the db with updateOne:
try {
  const response = await Comments.updateOne(
    {
      productId,
      comments: { $elemMatch: { usersWhoLiked: { $elemMatch: { userId } } } },
    },
    {
      $set: { 'comments.$[outer].usersWhoLiked.$[inner].likeUp': likes.up },
    },
    {
      arrayFilters: [{ 'outer._id': commentId }, { 'inner._userId': userId }],
    }
  ).exec();
  return res.status(201).json({ response });
} catch (err) {
  console.log(err);
  return res.send(err);
}
This is the collection that I am trying to update:
{
  "_id": {
    "$oid": "6307569d2308b78b378cc802"
  },
  "productId": "629da4b6634d5d11a859d729",
  "comments": [
    {
      "userId": "62f29c2c324f4778dff443f6",
      "userName": "User",
      "date": "2022.08.25",
      "confirmed": true,
      "likes": {
        "up": 0,
        "down": 0
      },
      "content": {
        "rating": 5,
        "description": "Nowy komentarz"
      },
      "_id": {
        "$oid": "630756b22308b78b378cc809"
      },
      "usersWhoLiked": [
        {
          "userId": "62f29c2c324f4778dff443f1",
          "likeUp": true,
          "_id": {
            "$oid": "6307572d2308b78b378cc80e"
          }
        },
        {
          "userId": "62f29c2c324f4778dff443f2",
          "likeUp": true,
          "_id": {
            "$oid": "6307572d2308b78b378cc80c"
          }
        }
      ]
    }
  ],
  "__v": 0
}
Mongoose schema for the comment collection:
const commentSchema = new Schema({
  productId: String,
  comments: [
    {
      userId: String,
      userName: String,
      date: String,
      confirmed: Boolean,
      likes: {
        up: {
          type: Number,
          default: 0,
        },
        down: {
          type: Number,
          default: 0,
        },
      },
      content: {
        rating: Number,
        description: String,
      },
      usersWhoLiked: [{ userId: String, likeUp: Boolean }],
    },
  ],
});
I guess the problem is with your arrayFilters, because you are trying to filter by the field _userId, which does not exist:
arrayFilters: [{ 'outer._id': commentId }, { 'inner._userId': userId }],
I managed to update the likeUp value using the following query:
db.collection.update({
  _id: ObjectId("6307569d2308b78b378cc802")
},
{
  $set: {
    "comments.$[user].usersWhoLiked.$[like].likeUp": false
  }
},
{
  arrayFilters: [
    {
      "user._id": ObjectId("630756b22308b78b378cc809")
    },
    {
      "like.userId": "62f29c2c324f4778dff443f1"
    }
  ]
})
Try it on MongoDB playground: https://mongoplayground.net/p/XhQMNBgEdhp
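The same fix carries over to the Mongoose updateOne call from the question. A minimal sketch, assuming productId, commentId, userId and likes.up hold the same values as in the original request handler:
const response = await Comments.updateOne(
  { productId },
  {
    // one filtered positional operator per arrayFilters entry
    $set: { 'comments.$[outer].usersWhoLiked.$[inner].likeUp': likes.up },
  },
  {
    // filter by fields that actually exist: _id on the comment, userId on the like
    arrayFilters: [{ 'outer._id': commentId }, { 'inner.userId': userId }],
  }
).exec();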
I am trying to insert a new entry into my collection; the problem is that it ignores both doctorId and patientId, while I'm sure they are not undefined. I tried changing the field types in the definition to strings just to test whether they would be inserted, but it was still no use.
schema:
const RdvSchema = new mongoose.Schema({
  patientId: {
    type: mongoose.Schema.Types.ObjectId,
    ref: "patients"
  },
  doctorId: {
    type: mongoose.Schema.Types.ObjectId,
    ref: "doctors"
  },
  createdAt: {
    type: Date,
    default: () => Date.now()
  },
  updatedAt: {
    type: Date,
    default: () => Date.now()
  },
  urgent: {
    type: Number,
    default: () => false
  },
  date: Date,
  period: String,
  description: String
})
The function saving the document:
const createRdv = async (req, res) => {
  try {
    console.log("patient id: ", req.body.patientId)
    let rdv = new Rdv({
      "patientId": req.body.patientId,
      "doctorId": req.body.doctorId,
      "description": req.body.description,
      "urgent": req.body.urgent,
      "date": req.body.date,
      "period": req.body.period
    })
    await rdv.save(async (err, rdv) => {
      if (err) {
        console.log(err)
        return res.status(500).send(false)
      }
      try {
        await DoctorRepository.addRdv(req.body.doctorId, rdv._id)
        await PatientRepository.addRdv(req.body.patientId, rdv._id)
      } catch (message) {
        console.log(message)
        res.status(500).send(false)
      }
    })
    res.status(200).send(true)
  } catch (ex) {
    res.status(500).send(false)
  }
}
The inserted document:
{
  "_id": {
    "$oid": "6269603d5f0e45e53a470f50"
  },
  "urgent": 3,
  "date": {
    "$date": {
      "$numberLong": "1653433200000"
    }
  },
  "period": "matin",
  "description": "this is a description",
  "createdAt": {
    "$date": {
      "$numberLong": "1651073085661"
    }
  },
  "updatedAt": {
    "$date": {
      "$numberLong": "1651073085661"
    }
  },
  "__v": 0
}
Update: for some reason, an old document keeps getting inserted that has nothing to do with what I was trying to insert. It is a document I had previously inserted through a test using Mocha.
In ImageSchema, I have some fields: title and imageURL. When retrieving single image data, I want to add http://localhost:4000/ to imageURL:
"imageURL": "http://localhost:4000/uploads/photo-1508919801845.jpeg",
Example data:
{
  "_id": "612e0328c1c6dd25c6f14fd4",
  "title": "photo-1508919801845",
  "imageURL": "/uploads/photo-1508919801845.jpeg",
  "createdAt": "2021-08-31T10:23:36.419Z",
  "updatedAt": "2021-08-31T10:23:36.419Z",
  "__v": 0
}
Controller.ts
const imageId = req.params.id
const findImage = await Image.findOne({ _id: imageId })
if (!findImage) {
  return res.status(404).json({ error: true, msg: "Image not found" })
}
const image = await Image.findById({ _id: imageId }).select({ _id: 0, __v: 0 })
return res.status(200).json({ error: false, data: image })
How can I do that?
You can use an aggregation pipeline with the $concat operator:
Image.aggregate([
  {
    "$match": {
      "_id": "612e0328c1c6dd25c6f14fd4"
    }
  },
  {
    "$set": {
      "imageURL": {
        "$concat": [
          "http://localhost:4000",
          "$imageURL"
        ]
      }
    }
  }
])
Working example
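One thing to watch out for: aggregate() bypasses Mongoose casting, so if _id is stored as an ObjectId (the Mongoose default) the raw string from req.params.id will not match it in $match. A minimal sketch of how the controller could run the pipeline, assuming imageId comes from req.params.id as in Controller.ts and that mongoose is imported there:
const results = await Image.aggregate([
  // aggregate() does not cast strings, so convert the route param explicitly
  { $match: { _id: new mongoose.Types.ObjectId(imageId) } },
  { $set: { imageURL: { $concat: ["http://localhost:4000", "$imageURL"] } } },
  // mirror the .select({ _id: 0, __v: 0 }) from the original controller
  { $project: { _id: 0, __v: 0 } },
])
if (!results.length) {
  return res.status(404).json({ error: true, msg: "Image not found" })
}
return res.status(200).json({ error: false, data: results[0] })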
So I'm trying to get profile data using an API in Node.js, but I never seem to get the complete object (for example, avatar is missing from the response).
I tried to console.log the object and it looked perfectly fine.
Here is the server code:
router.get('/users/me', auth, async (req, res) => {
try {
const me = await User.findById(req.user._id)
console.log(me)
res.send(me)
} catch (e) {
res.status(500).send(e)
}
})
Here is the response on the console:
{ avatar:
'https://wisapedia-uploads.s3.amazonaws.com/default_ava.png',
bookmarks: [],
trips: [],
verified: false,
_id: 5d67b4155a032f3450c8ca03,
name: 'Syauqi',
password:
'$2a$08$LwLJPrp6MPdffkpJ1T2iN.QFbMDU5gAsSceNZVzU8tfDe5aUjGfFO',
email: 'ahmadthariq#gmail.com',
number: 85786135661,
birthday: 1997-04-01T16:00:00.000Z,
tokens:
[ { _id: 5d67b4155a032f3450c8ca04,
token:
'eyJhbGciOiJIUzI1NiIsI4gdCI6IkpXVCJ9.eyJfaWQiOiI1ZDY3YjQxNTVhMDMyZjM0NTBjOGNhMDMiLCJpYXQiOjE1NjcwNzczOTd9.i-r2wB6BoqK7cSoIxBiWB6SSESoCk3S5G_sW5PMz09s' } ],
createdAt: 2019-08-29T11:16:37.516Z,
updatedAt: 2019-08-29T11:16:37.572Z,
__v: 1 }
Here is what I got in Postman:
{
  "bookmarks": [],
  "trips": [],
  "verified": false,
  "_id": "5d67b4155a032f3450c8ca03",
  "name": "Syauqi",
  "email": "ahmadthariq#gmail.com",
  "number": 85786135661,
  "birthday": "1997-04-01T16:00:00.000Z",
  "createdAt": "2019-08-29T11:16:37.516Z",
  "updatedAt": "2019-08-29T11:16:37.572Z",
  "__v": 1
}
Thank you
I have been using this mongoose plugin to perform findOrCreate, which is used very often in the codebase.
I recently realized that performing multiple asynchronous findOrCreate operations when a unique index exists easily leads to an E11000 duplicate key error.
An example can be described by the following, using Promise.all. Suppose name is unique; then:
const promises = await Promise.all([
  Pokemon.findOrCreate({ name: 'Pikachu' }),
  Pokemon.findOrCreate({ name: 'Pikachu' }),
  Pokemon.findOrCreate({ name: 'Pikachu' })
]);
The above will certainly fail, since findOrCreate is not atomic. It makes sense, once you think about it, why it fails, but what I would like is a streamlined way of approaching this problem.
Many of my models use findOrCreate and they are all subject to this problem. One solution that comes to mind would be to create a plugin that catches the error and then returns the result of find; however, there may be a better approach here, possibly a native mongoose one that I am not aware of.
It certainly depends on your intended usage of this, but I would say overall that "plugins" are just not required. The basic functionality you are looking for is already "built in" to MongoDB with "upserts".
By definition, an "upsert" cannot produce a "duplicate key error" as long as the query condition to "select" the document is issued using the "unique key" for the collection. In this case "name".
In a nutshell you can mimic the same behavior as above by simply doing:
let results = await Promise.all([
  Pokemon.findOneAndUpdate({ "name": "Pikachu" }, {}, { "upsert": true, "new": true }),
  Pokemon.findOneAndUpdate({ "name": "Pikachu" }, {}, { "upsert": true, "new": true }),
  Pokemon.findOneAndUpdate({ "name": "Pikachu" }, {}, { "upsert": true, "new": true })
]);
Which would simply "create" the item on the first call where it did not already exist, or "return" the existing item. This is how "upserts" work.
[
  {
    "_id": "5a022f48edca148094f30e8c",
    "name": "Pikachu",
    "__v": 0
  },
  {
    "_id": "5a022f48edca148094f30e8c",
    "name": "Pikachu",
    "__v": 0
  },
  {
    "_id": "5a022f48edca148094f30e8c",
    "name": "Pikachu",
    "__v": 0
  }
]
If you really did not care about "returning" each call and simply wanted to "update or create", then it's actually far more efficient to simply send one request with bulkWrite():
// Issue a "batch" in Bulk
let result = await Pokemon.bulkWrite(
Array(3).fill(1).map( (e,i) => ({
"updateOne": {
"filter": { "name": "Pikachu" },
"update": {
"$set": { "skill": i }
},
"upsert": true
}
}))
);
So instead of waiting for the server to resolve three async calls, you only make one, which either "creates" the item or "updates" it with whatever you put in the $set modifier when it is found. These modifications are applied on every match, including the first; if you want something applied "only on create", there is $setOnInsert for that.
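For illustration, a small sketch of a single bulk upsert that refreshes skill with $set on every write but only stamps a field when the document is first created; the firstSeen field here is purely hypothetical and not part of the schema above:
// Sketch only: "firstSeen" is a hypothetical field used to illustrate $setOnInsert
let result = await Pokemon.bulkWrite([
  {
    "updateOne": {
      "filter": { "name": "Pikachu" },
      "update": {
        "$set": { "skill": 9 },                      // applied on every matching write
        "$setOnInsert": { "firstSeen": new Date() }  // applied only when the upsert inserts
      },
      "upsert": true
    }
  }
]);
Note that MongoDB rejects an update that names the same field in both $set and $setOnInsert, so keep the two sets of fields disjoint.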
Of course this is just a "write", so it really depends on whether it is important to you to return the modified document or not. "Bulk" operations simply "write"; they do not return documents, but instead return information on the "batch" indicating what was "upserted" and what was "modified", as in:
{
  "ok": 1,
  "writeErrors": [],
  "writeConcernErrors": [],
  "insertedIds": [],
  "nInserted": 0,
  "nUpserted": 1,   // <-- created 1 time
  "nMatched": 2,    // <-- matched and modified the two other times
  "nModified": 2,
  "nRemoved": 0,
  "upserted": [
    {
      "index": 0,
      "_id": "5a02328eedca148094f30f33"   // <-- this is the _id created in the upsert
    }
  ],
  "lastOp": {
    "ts": "6485801998833680390",
    "t": 23
  }
}
So if you do want a "return", then a more typical case is to separate which data you want applied on "create" and which on every "update". Note that the $setOnInsert is essentially "implied" for whatever values are in the "query" condition used to select the document:
// Issue 3 pokemon as separate calls
let sequence = await Promise.all(
  Array(3).fill(1).map( (e,i) =>
    Pokemon.findOneAndUpdate(
      { name: "Pikachu" },
      { "$set": { "skill": i } },
      { "upsert": true, "new": true }
    )
  )
);
Which would show the modifications applied in "sequence" by each atomic operation:
[
  {
    "_id": "5a02328fedca148094f30f38",
    "name": "Pikachu",
    "__v": 0,
    "skill": 0
  },
  {
    "_id": "5a02328fedca148094f30f39",
    "name": "Pikachu",
    "__v": 0,
    "skill": 1
  },
  {
    "_id": "5a02328fedca148094f30f38",
    "name": "Pikachu",
    "__v": 0,
    "skill": 2
  }
]
So generally it's "upserts" that you want here, and depending on your intent you either use separate calls to return each modification/creation or you issue your "writes" in a batch.
As a complete listing to demonstrate all the above:
const mongoose = require('mongoose'),
      Schema = mongoose.Schema;

mongoose.Promise = global.Promise;
mongoose.set('debug', true);

const uri = 'mongodb://localhost/test',
      options = { useMongoClient: true };

const pokemonSchema = new Schema({
  name: String,
  skill: Number
},{ autoIndex: false });

pokemonSchema.index({ name: 1 },{ unique: true, background: false });

const Pokemon = mongoose.model('Pokemon', pokemonSchema);

function log(data) {
  console.log(JSON.stringify(data, undefined, 2))
}

(async function() {

  try {

    const conn = await mongoose.connect(uri,options);

    // Await index creation, otherwise we error
    await Pokemon.ensureIndexes();

    // Clean data for test
    await Pokemon.remove();

    // Issue 3 pokemon as separate calls
    let pokemon = await Promise.all(
      Array(3).fill(1).map( e =>
        Pokemon.findOneAndUpdate({ name: "Pikachu" },{},{ "upsert": true, "new": true })
      )
    );
    log(pokemon);

    // Clean data again
    await Pokemon.remove();

    // Issue a "batch" in Bulk
    let result = await Pokemon.bulkWrite(
      Array(3).fill(1).map( (e,i) => ({
        "updateOne": {
          "filter": { "name": "Pikachu" },
          "update": {
            "$set": { "skill": i }
          },
          "upsert": true
        }
      }))
    );
    log(result);

    let allPokemon = await Pokemon.find();
    log(allPokemon);

    // Clean data again
    await Pokemon.remove();

    // Issue 3 pokemon as separate calls
    let sequence = await Promise.all(
      Array(3).fill(1).map( (e,i) =>
        Pokemon.findOneAndUpdate(
          { name: "Pikachu" },
          { "$set": { "skill": i } },
          { "upsert": true, "new": true }
        )
      )
    );
    log(sequence);

  } catch(e) {
    console.error(e);
  } finally {
    mongoose.disconnect();
  }

})()
Which would produce the following output (for those too lazy to run it themselves):
Mongoose: pokemons.ensureIndex({ name: 1 }, { unique: true, background: false })
Mongoose: pokemons.remove({}, {})
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 } }, { upsert: true, new: true, remove: false, fields: {} })
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 } }, { upsert: true, new: true, remove: false, fields: {} })
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 } }, { upsert: true, new: true, remove: false, fields: {} })
[
  {
    "_id": "5a023461edca148094f30f82",
    "name": "Pikachu",
    "__v": 0
  },
  {
    "_id": "5a023461edca148094f30f82",
    "name": "Pikachu",
    "__v": 0
  },
  {
    "_id": "5a023461edca148094f30f82",
    "name": "Pikachu",
    "__v": 0
  }
]
Mongoose: pokemons.remove({}, {})
Mongoose: pokemons.bulkWrite([ { updateOne: { filter: { name: 'Pikachu' }, update: { '$set': { skill: 0 } }, upsert: true } }, { updateOne: { filter: { name: 'Pikachu' }, update: { '$set': { skill: 1 } }, upsert: true } }, { updateOne: { filter: { name: 'Pikachu' }, update: { '$set': { skill: 2 } }, upsert: true } } ], {})
{
  "ok": 1,
  "writeErrors": [],
  "writeConcernErrors": [],
  "insertedIds": [],
  "nInserted": 0,
  "nUpserted": 1,
  "nMatched": 2,
  "nModified": 2,
  "nRemoved": 0,
  "upserted": [
    {
      "index": 0,
      "_id": "5a023461edca148094f30f87"
    }
  ],
  "lastOp": {
    "ts": "6485804004583407623",
    "t": 23
  }
}
Mongoose: pokemons.find({}, { fields: {} })
[
  {
    "_id": "5a023461edca148094f30f87",
    "name": "Pikachu",
    "skill": 2
  }
]
Mongoose: pokemons.remove({}, {})
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 }, '$set': { skill: 0 } }, { upsert: true, new: true, remove: false, fields: {} })
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 }, '$set': { skill: 1 } }, { upsert: true, new: true, remove: false, fields: {} })
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 }, '$set': { skill: 2 } }, { upsert: true, new: true, remove: false, fields: {} })
[
  {
    "_id": "5a023461edca148094f30f8b",
    "name": "Pikachu",
    "__v": 0,
    "skill": 0
  },
  {
    "_id": "5a023461edca148094f30f8b",
    "name": "Pikachu",
    "__v": 0,
    "skill": 1
  },
  {
    "_id": "5a023461edca148094f30f8b",
    "name": "Pikachu",
    "__v": 0,
    "skill": 2
  }
]
N.B. The $setOnInsert is also "implied" in all "mongoose" operations for the purpose of applying the __v key. So unless you turn this off, that statement is always "merged" with whatever update is issued; this is what allows the {} in the first example's "update" block, which would otherwise be an error in the core driver because no update modifier is applied, yet mongoose adds this one for you.
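If you do want to turn that off, the version key can be disabled on the schema itself. A minimal sketch (with the caveat that, once the version key is disabled, the empty {} update in the first example would then need a real update modifier):
const pokemonSchema = new Schema({
  name: String,
  skill: Number
},{ autoIndex: false, versionKey: false }); // no __v, so mongoose no longer merges a $setOnInsert for it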
Also note that bulkWrite() does not actually reference the "schema" for the model and bypasses it. This is why there is no __v in those issued updates, and it does indeed bypass all validation as well. This is usually not an issue, but it is something you should be aware of.