Sequelize referencing to wrong foreign key - javascript

There are two tables which are Transaction and Transaction Details. The recordId is a foreign key of Transaction Details.
I am trying to find a Transaction by recordId and then include the Transaction Details. A Transaction can have many Transaction Details. Here is my code:
// Association: one Transaction row owns many TransactionDetails rows.
// NOTE(review): only foreignKey is supplied here. Without `sourceKey: 'recordId'`,
// Sequelize joins TransactionDetails.recordId against Transaction's primary key
// (id) — which is exactly the mismatch reported below (recordId "8151" = the
// parent's id, not its recordId "6688").
db.models.Transaction.hasMany(db.models.TransactionDetails, {
foreignKey: 'recordId'
});
And then I'm querying a findOne and it looks like this:
// Fetch a single Transaction by its business key and eager-load its details.
db.models.Transaction.findOne({
where: {
// recordId is taken straight from the query string and compared as-is.
recordId: req.query.recordid
} ,
include: [{
// Eager-load the detail rows through the hasMany association defined above.
model: db.models.TransactionDetails
}]
})
But when I sent the request on Postman, the JSON data looked like this:
{
"error": false,
"message": {
"id": "8151",
"recordId": "6688",
"transactionNo": "1563804469415",
"cashierId": "4",
"payType": "cash",
"terminalNo": "0012346",
"amount": 40,
"discount": 0,
"cardNo": "none",
"transDate": "2019-07-22T14:23:26.000Z",
"createdAt": "2019-07-22T14:20:19.679Z",
"updatedAt": "2019-07-22T14:20:19.679Z",
"pt-transactions-details": [
{
"id": "38048",
"recordId": "8151", //this is wrong, the recordId must be the same as above which is 6688
"transId": "3731",
"productId": "539",
"quantity": "1",
"amount": 60,
"terminal": "002789",
"createdAt": "2019-09-13T01:22:48.349Z",
"updatedAt": "2019-09-13T01:22:48.349Z"
},
{
"id": "9921",
"recordId": "8151", //this is wrong, the recordId must be the same as above which is 6688
"transId": "3985",
"productId": "1061",
"quantity": "2",
"amount": 100,
"terminal": "0012346",
"createdAt": "2019-07-05T03:44:49.406Z",
"updatedAt": "2019-07-05T03:44:49.406Z"
},
{
"id": "68848",
"recordId": "8151", //this is wrong, the recordId must be the same as above which is 6688
"transId": "5358",
"productId": "1128",
"quantity": "1",
"amount": 160,
"terminal": "171412",
"createdAt": "2019-10-15T13:00:03.864Z",
"updatedAt": "2019-10-15T13:00:03.864Z"
}
]
}
}
Can someone help me regarding this? I already spent a day trying to figure this out.

Short answer, you need to pass sourceKey into hasMany method.
Transaction.hasMany(TransactionDetail, { foreignKey: 'recordId', sourceKey: 'recordId' });
Long answer, here is an example:
index.ts:
// Minimal reproduction showing that hasMany with sourceKey joins child rows on
// the parent's recordId column instead of its primary key.
import { sequelize } from '../../db';
import { Model, DataTypes } from 'sequelize';
import assert from 'assert';
// Parent model: recordId is a unique business key, distinct from the auto id.
class Transaction extends Model {}
Transaction.init(
{
recordId: {
unique: true,
type: DataTypes.STRING,
},
},
{ sequelize, modelName: 'transactions' },
);
// Child model: each row stores an amount; its recordId FK is added by hasMany below.
class TransactionDetail extends Model {}
TransactionDetail.init(
{
amount: DataTypes.INTEGER,
},
{ sequelize, modelName: 'transaction_details' },
);
// sourceKey makes the join use transactions.recordId rather than transactions.id.
Transaction.hasMany(TransactionDetail, { foreignKey: 'recordId', sourceKey: 'recordId' });
(async function test() {
try {
// force: true drops and recreates the tables so the run starts clean.
await sequelize.sync({ force: true });
// Create the parent together with three nested details in one call.
await Transaction.create(
{ recordId: '6688', transaction_details: [{ amount: 60 }, { amount: 100 }, { amount: 160 }] },
{ include: [TransactionDetail] },
);
// Reload by business key and eager-load the details to verify the join key.
const rval = await Transaction.findOne({ where: { recordId: '6688' }, include: [TransactionDetail] });
console.log(rval.dataValues);
assert.equal(rval.transaction_details.length, 3, 'transaction details count should equal 3');
const transactionDetailsDataValues = rval.transaction_details.map((d) => d.dataValues);
console.log('transactionDetailsDataValues: ', transactionDetailsDataValues);
} catch (error) {
console.log(error);
} finally {
// Always release the connection pool, even when the assertions fail.
await sequelize.close();
}
})();
The execution result of above code:
{ id: 1,
recordId: '6688',
transaction_details:
[ transaction_details {
dataValues: [Object],
_previousDataValues: [Object],
_changed: {},
_modelOptions: [Object],
_options: [Object],
isNewRecord: false },
transaction_details {
dataValues: [Object],
_previousDataValues: [Object],
_changed: {},
_modelOptions: [Object],
_options: [Object],
isNewRecord: false },
transaction_details {
dataValues: [Object],
_previousDataValues: [Object],
_changed: {},
_modelOptions: [Object],
_options: [Object],
isNewRecord: false } ] }
transactionDetailsDataValues: [ { id: 1, amount: 60, recordId: '6688' },
{ id: 2, amount: 100, recordId: '6688' },
{ id: 3, amount: 160, recordId: '6688' } ]
Check the data record in the database:
node-sequelize-examples=# select * from "transactions";
id | recordId
----+----------
1 | 6688
(1 row)
node-sequelize-examples=# select * from "transaction_details";
id | amount | recordId
----+--------+----------
1 | 60 | 6688
2 | 100 | 6688
3 | 160 | 6688
(3 rows)
Sequelize version: "sequelize": "^5.21.3"
source code: https://github.com/mrdulin/node-sequelize-examples/tree/master/src/examples/stackoverflow/60446814

You need to define Sequelize association as mentioned below -
// A Transaction owns many TransactionDetails rows, so the association must be
// hasMany (the FK lives on TransactionDetails), not belongsTo — belongsTo would
// place the FK on Transaction and `include` would yield a single detail object
// instead of an array. sourceKey joins on Transaction.recordId instead of id.
db.models.Transaction.hasMany(db.models.TransactionDetails, {
foreignKey: 'recordId', sourceKey: 'recordId'
});
I hope it helps!

Related

Mongoose, updated nested array

My question is:
How can I query in the nested arrays?
I want to change value in key "likeUp" which is nested inside object in array "usersWhoLiked". Where "usersWhoLiked" is nested in array "comments"
How Can I do that with mongoose ?
Request that I wrote beneath... do not work, but is very similar to answer given in StackOverflow post: Mongoose update update nested object inside an array
This is my request to db with updateOne:
try {
const response = await Comments.updateOne(
{
productId,
comments: { $elemMatch: { usersWhoLiked: { $elemMatch: { userId } } } },
},
{
$set: { 'comments.$[outer].usersWhoLiked.$[inner].likeUp': likes.up },
},
{
arrayFilters: [{ 'outer._id': commentId }, { 'inner._userId': userId }],
}
).exec();
return res.status(201).json({ response });
} catch (err) {
console.log(err);
return res.send(err);
}
This is the collection, that I am trying to update:
{
"_id": {
"$oid": "6307569d2308b78b378cc802"
},
"productId": "629da4b6634d5d11a859d729",
"comments": [
{
"userId": "62f29c2c324f4778dff443f6",
"userName": "User",
"date": "2022.08.25",
"confirmed": true,
"likes": {
"up": 0,
"down": 0
},
"content": {
"rating": 5,
"description": "Nowy komentarz"
},
"_id": {
"$oid": "630756b22308b78b378cc809"
},
"usersWhoLiked": [
{
"userId": "62f29c2c324f4778dff443f1",
"likeUp": true,
"_id": {
"$oid": "6307572d2308b78b378cc80e"
}
},
{
"userId": "62f29c2c324f4778dff443f2",
"likeUp": true,
"_id": {
"$oid": "6307572d2308b78b378cc80c"
}
}
]
}
],
"__v": 0
}
Mongooes schema for comment collection:
// One document per product; all of its comments live in the embedded array.
const commentSchema = new Schema({
productId: String,
comments: [
{
userId: String,
userName: String,
date: String,
confirmed: Boolean,
// Aggregate like/dislike counters for the comment.
likes: {
up: {
type: Number,
default: 0,
},
down: {
type: Number,
default: 0,
},
},
content: {
rating: Number,
description: String,
},
// Per-user vote entries; note the field is `userId`, not `_userId`.
usersWhoLiked: [{ userId: String, likeUp: Boolean }],
},
],
});
I guess the problem is with your arrayFilters operator, because you are trying to filter by field _userId which does not exist:
arrayFilters: [{ 'outer._id': commentId }, { 'inner._userId': userId }],
I managed to update the likeUp value using the following query:
// Shell equivalent of the fix: filter the arrays with $[user] (the comment)
// and $[like] (the vote entry), keyed on fields that actually exist.
db.collection.update({
_id: ObjectId("6307569d2308b78b378cc802")
},
{
$set: {
"comments.$[user].usersWhoLiked.$[like].likeUp": false
}
},
{
arrayFilters: [
{
"user._id": ObjectId("630756b22308b78b378cc809")
},
{
// `like.userId` (not `_userId`) matches the schema's vote entries.
"like.userId": "62f29c2c324f4778dff443f1"
}
]
})
Try it on MongoDB playground: https://mongoplayground.net/p/XhQMNBgEdhp

Querying String data from database into array Sequelize

So i basically trying to get string value from database into an Array using Sequelize:
{
"status": "success",
"data": [
{
"id": 15,
"title": "The Godfather",
"year": 1972,
"director": "Francis Ford Coppola",
"categoryFilm": "R",
"Url": "https://m.media-amazon.com/images/M/MV5BMTU4MTgxOTQ0Nl5BMl5BanBnXkFtZTgwNDI0Mjk1NDM#._V1_UY100_CR19,0,100,100_AL_.jpg,https://m.media-amazon.com/images/M/MV5BMTczMTk5MjkwOF5BMl5BanBnXkFtZTgwMDI0Mjk1NDM#._V1_UY100_CR12,0,100,100_AL_.jpg,https://m.media-amazon.com/images/M/MV5BZTFiODA5NWEtM2FhNC00MWEzLTlkYjgtMWMwNzBhYzlkY2U3XkEyXkFqcGdeQXVyMDM2NDM2MQ##._V1_UX100_CR0,0,100,100_AL_.jpg",
"genre": [
{
"genre": "Crime,Drama"
}
]
}
]
}
with my code showed as below, i try sequelize literal to change it but i cannot resolve how to turn it from string into array and split it by "," (comma):
// List all films with joined category and photo columns lifted to the top level.
// NOTE(review): this mixes `await` with `.then()/.catch()` on the same call —
// pick one style; with async/await a try/catch would be idiomatic.
const allFilm = async (req, res) => {
await Film.findAll({
attributes: [
"id",
"title",
"year",
"director",
// Raw column references pull the joined values up as flat aliases.
[sequelize.literal(`"category"."category"`), "categoryFilm"],
[sequelize.literal(`"photo"."photoUrl"`), "Url"],
],
subQuery: false,
include: [
{
model: Genre,
as: "genre",
attributes: ["genre"],
},
{
model: Category,
as: "category",
attributes: [],
},
{
model: Photo,
as: "photo",
attributes: [],
},
],
})
.then((data) => {
// "Url" arrives as one comma-joined string; nothing here splits it into an
// array, which is the behavior the author is asking about. Beware that the
// URLs themselves contain commas (e.g. "CR19,0,100,100"), so a naive
// split(',') would break them — TODO confirm a safe delimiter.
res.status(200).json({
status: "success",
data: data,
});
})
.catch((err) => {
res.status(400).json({
status: err,
});
});
};
What I want is shown below. Is there something wrong with my code? I see no error, but I cannot resolve the value into an array split by commas:
"Url": ["https://m.media-amazon.com/images/M/MV5BMTU4MTgxOTQ0Nl5BMl5BanBnXkFtZTgwNDI0Mjk1NDM#._V1_UY100_CR19,0,100,100_AL_.jpg","https://m.media-amazon.com/images/M/MV5BMTczMTk5MjkwOF5BMl5BanBnXkFtZTgwMDI0Mjk1NDM#._V1_UY100_CR12,0,100,100_AL_.jpg","https://m.media-amazon.com/images/M/MV5BZTFiODA5NWEtM2FhNC00MWEzLTlkYjgtMWMwNzBhYzlkY2U3XkEyXkFqcGdeQXVyMDM2NDM2MQ##._V1_UX100_CR0,0,100,100_AL_.jpg"],
I can't answer your question completely, but you could try this:
From Load attributes from associated model in sequelize.js
// Suggested variant: Sequelize.col + raw:true to flatten the joined columns.
await Film.findAll({
attributes: [
"id",
"title",
"year",
"director",
[sequelize.literal(`"category"."category"`), "categoryFilm"],
[Sequelize.col('photo.photoUrl'), 'Url'] // here
],
subQuery: false,
raw:true, // here: return plain objects instead of model instances
include: [
{
model: Genre,
as: "genre",
attributes: ["genre"],
},
{
model: Category,
as: "category",
attributes: [],
},
{
model: Photo,
as: "photo",
attributes: [],
required: false, // here: LEFT JOIN so films without photos still appear
},
],
})

Sequelize findAndCountAll pagination issue

When using findAndCountAll with a limit and offset, I get only (for example) 8 rows per page instead of 10.
Here's what I'm using to paginate results (10 per page):
// Paginated list of club players, joined through the UserTeam pivot table.
async function allPlayers(req, res) {
// NOTE(review): parseInt without a radix and without validation — NaN page
// or perPage would silently produce a broken offset. Consider a radix of 10
// plus defaults.
const page = parseInt(req.query.page);
const perPage = parseInt(req.query.perPage);
const options = {
where: {
[Op.and]: [
{
type: "player",
},
{
// $...$ syntax filters on a column of the joined teams association.
"$teams.team.type$": "club",
},
],
},
include: [
{
model: UserTeam,
// duplicating: false keeps limit/offset in the main query, but (as
// described below) rows filtered after the join shrink the page size.
duplicating: false,
required: true,
include: [
{
model: Team,
include: [{ model: Club }, { model: School }],
},
],
},
],
};
const { rows, count } = await User.findAndCountAll({
...options,
limit: perPage,
// Pages are 1-based: page 1 starts at offset 0.
offset: perPage * (page - 1),
});
res.json({ data: { rows, count } });
}
The issue seems to be Sequelize filtering out the rows when returned from SQL, instead of in the query. This happens because of this segment in the find options query:
{
model: UserTeam,
duplicating: false,
required: true,
include: [...],
}
Because of that, instead of returning 10 per paginated page, it's returning 10 or less (depending if any rows were filtered out).
Is there a fix for this behaviour or a different way to re-structure my data so I don't need this nested query?
I need this because I have a database/model structure like this:
User (players, coaches, admins, etc.)
|
|_ UserTeam (pivot table containing userId and teamId)
|
|_ Team
I don't think you need to reference the pivot/join tables in the query itself. There are too many nested levels of entities in your query with School and Club etc.
If you, instead of including the nested data when you perform the query, include the User/Player model and Team model in the UserTeam model with the reference keyword, Sequelize in my experience will handle this type of operation better. The same goes for School and Club, they could be included in their join tables' model definitions instead.
Note that I use the define method and not the class based models. What if you try something like this:
// Join (pivot) table connecting Users and Teams; rows are soft-deleted.
const { DataTypes } = require('sequelize')
const sequelize = require('./../config/db')
const User = require('./User')
const Team = require('./Team')
const UserTeam = sequelize.define('userTeam', {
id: {
type: DataTypes.INTEGER,
primaryKey: true,
autoIncrement: true,
allowNull: false
},
teamId: {
type: DataTypes.UUID,
allowNull: false,
unique: false,
// Remove membership rows automatically when the team is deleted.
onDelete: 'CASCADE',
references: {
model: Team,
key: 'id'
},
validate: {
isUUID: {
args: 4,
msg: 'Team ID must be a UUID4 string.'
}
}
},
userId: {
type: DataTypes.UUID,
allowNull: false,
unique: false,
onDelete: 'CASCADE',
references: {
model: User,
key: 'id'
},
validate: {
isUUID: {
args: 4,
msg: 'User ID must be a UUID4 string.'
}
}
},
deletedAt: {
type: DataTypes.DATE,
allowNull: true,
defaultValue: null
}
},
{
// paranoid + deletedAt => soft deletes instead of destructive removal.
paranoid: true,
tableName: 'userTeam'
})
Then you include the Team like this:
const users = await User.findAll({
include: Team
})
Associations (it would probably be more logical to call the join table TeamUser and not UserTeam):
User.belongsToMany(Team, { through: UserTeam, foreignKey: 'userId', onDelete: 'CASCADE' })
Team.belongsToMany(User, { through: UserTeam, foreignKey: 'teamId', onDelete: 'CASCADE' })
A json response based on this many-to-many relationship and the query above should result in a list of all users with every team that user is connected to. Here is one of those users:
{
"fullName": "Player One",
"id": "6e8ca258-9daa-4d52-b033-a077d98c29ef",
"firstName": "Player",
"lastName": "One",
"email": "playerone#gmail.com",
"password": "$2a$14$tApwpX9Ld9a1cjZMFzTGZeVEUC01M7n/tSVlldG7OEbm9sEh/k8kW",
"verified": 1,
"verifyCode": "4f49b5ca-12ed5a2d-1608191096291",
"resetPasswordToken": "bd15eda5097e030988eed5d2d20b3bbb6a06439f6b9907783fc0ea19083d8410",
"resetPasswordExpire": 1620266644543,
"passwordChangedAt": 1608337842942,
"createdFromIp": "122.222.10.33",
"createdAt": "2020-12-14T06:33:13.000Z",
"updatedAt": "2021-05-06T01:54:04.000Z",
"deletedAt": null,
"teams": [
{
"id": "427e9de4-9318-4406-aed9-fcbb3b8a3282",
"name": "Texas Rangers",
"type": "MLB",
"slug": "texas-rangers",
"createdByUserId": "6e8ca258-9daa-4d52-b033-a077d98c29ef",
"createdAt": "2020-12-14T06:33:15.000Z",
"updatedAt": "2020-12-29T06:07:54.000Z",
"deletedAt": null,
"userTeams": {
"id": 54,
"teamId": "427e9de4-9318-4406-aed9-fcbb3b8a3282",
"userId": "6e8ca258-9daa-4d52-b033-a077d98c29ef",
"deletedAt": null,
"createdAt": "2020-12-14T06:33:15.000Z",
"updatedAt": "2020-12-14T06:33:15.000Z"
}
},
{
"id": "cbff6df7-0e0c-4906-9e1c-54b569079d83",
"name": "New York Yankees",
"type": "MLB",
"slug": "yankees",
"createdByUserId": "16f38fc5-63d1-4285-8773-526720f9a506",
"createdAt": "2020-12-14T23:57:11.000Z",
"updatedAt": "2021-07-06T06:08:35.000Z",
"deletedAt": null,
"userTeams": {
"id": 55,
"teamId": "cbff6df7-0e0c-4906-9e1c-54b569079d83",
"userId": "6e8ca258-9daa-4d52-b033-a077d98c29ef",
"deletedAt": null,
"createdAt": "2020-12-14T23:57:11.000Z",
"updatedAt": "2020-12-14T23:57:11.000Z"
}
}
]
}
Try this pagination helper function:
paginate: ({
currentPage,
pageSize
}) => {
const offset = parseInt((currentPage - 1) * pageSize, 10);
const limit = parseInt(pageSize, 10);
return {
offset,
limit,
};
},
// import function here
const result = await city.findAndCountAll({
where: conditions,
order: [
['createdAt', 'DESC']
],
...paginate({
currentPage: page,
pageSize: limit
}),
})

Update MongoDB object in object by id

Model:
// Project document: top-level metadata plus an embedded array of seats, each
// of which gets its own subdocument _id that updateSeatEmployee matches on.
const projectSchema = new Schema({
name: {
type: String,
maxlength: 50
},
description: {
type: String
},
projectLead: {
type: String
},
// Dates are stored as ISO strings, not Date objects — TODO confirm intended.
start: {
type: String
},
end: {
type: String
},
projectType: {
type: String
},
claimId: {
type: String
},
organization: {
type: String
},
seats: [{
id: String,
employee: String,
role: String,
start: String,
end: String,
workload: Number,
skills: Array,
approved: Boolean
}]
}, {
// Adds createdAt/updatedAt automatically.
timestamps: true
})
Model response example:
{
"project": {
"_id": "5cab4b9bc9b29a7ba2363875",
"name": "Project Title",
"description": "Project description",
"projectLead": "email#email",
"start": "2018-06-01T09:45:00.000Z",
"end": "2019-06-31T09:45:00.000Z",
"claimId": "AIIIIII",
"organization": "Company ACE",
"seats": [
{
"skills": [
"Node.js",
"Vue.js"
],
"_id": "5cab548e5cefe27ef82ca313",
"start": "2018-06-01T09:45:00.000Z",
"end": "2019-06-31T09:45:00.000Z",
"role": "Developer",
"approved": false,
"workload": 20,
"employee": ''
}
],
"createdAt": "2019-04-08T13:24:43.253Z",
"updatedAt": "2019-04-08T14:02:54.257Z",
"__v": 0
}
}
Controller:
// Set the employee on the seat whose subdocument _id matches req.params.id.
exports.updateSeatEmployee = async (req, res, next) => {
try {
const project = await Project.findOneAndUpdate({
// Matches any project containing a seat with this _id.
"seats._id": req.params.id
}, {
// NOTE(review): this is the reported bug — the update lacks the $set
// operator and the positional `$`, so it does not address the matched
// array element ("seats.$.employee"); see the accepted answer below.
"seats.employee": req.body.employee
}, {
// Return the post-update document rather than the original.
new: true
})
console.log("project", project);
return res.json({
project: project
})
} catch (error) {
// Delegate to the Express error-handling middleware.
next(error)
}
}
Debugger:
Mongoose: users.createIndex({ email: 1 }, { unique: true, background:
true }) { sub: '5cab48ebec24577ab3329fcd', iat: 1554729210 }
Mongoose: users.findOne({ _id: ObjectId("5cab48ebec24577ab3329fcd") },
{ projection: {} })
Mongoose: projects.findAndModify({ 'seats._id':
ObjectId("5cab529bcafa027e30ee229c") }, [], { '$setOnInsert': {
createdAt: new Date("Mon, 08 Apr 2019 14:45:56 GMT") }, '$set': {
'seats.$.employee': 'email#email.com', updatedAt: new
Date("Mon, 08 Apr 2019 14:45:56 GMT") } }, { new: true, upsert: false,
remove: false, projection: {} }) (node:33302) DeprecationWarning:
collection.findAndModify is deprecated. Use findOneAndUpdate,
findOneAndReplace or findOneAndDelete instead.
project null
I want to search for a specific object in seats by its id.
This specific seat should be update. In my case I want to update the employee field.
If I do a findOne({"seats._id": req.params.id}) I get the project back but findOneAndUpdate returns null.
You need the $ positional operator since seats is an array and you should use $set operator if you want to update just one field
// Match the project containing the seat, then use $set with the positional `$`
// operator so only the matched seat's employee field is replaced.
// Fix: the original snippet was missing the closing parenthesis of the
// findOneAndUpdate call (and the options argument used by the caller).
const project = await Project.findOneAndUpdate(
  { "seats._id": req.params.id },
  { $set: { "seats.$.employee": req.body.employee } },
  { new: true } // return the updated document, matching the original controller
);

Avoiding Unique error E11000 with Promise.all

I have been using this mongoose plugin to perform findOrCreate which is used very often in the codebase.
I recently realized that performing multiple asynchronous findOrCreate operations when the unique index is created easily leads to an E11000 duplicate key error.
An example can be described by the following using Promise.all. Suppose name is unique then:
const promises = await Promise.all([
Pokemon.findOrCreate({ name: 'Pikachu' }),
Pokemon.findOrCreate({ name: 'Pikachu' }),
Pokemon.findOrCreate({ name: 'Pikachu' })
]);
The above will certainly fail since findOrCreate is not atomic. It makes sense after thinking about it why it fails but, what I would like is a streamlined way of approaching this problem.
Many of my models use findOrCreate and they are all subject to this problem. One solution that comes to mind would be to create a plugin that would catch the error and then return the result of find however, there may be a better approach here - possibly a native mongoose one that I am not aware of.
It certainly depends on your intended usage of this, but I would say overall that "plugins" are just not required. The basic functionality you are looking for is already "built in" to MongoDB with "upserts".
By definition, an "upsert" cannot produce a "duplicate key error" as long as the query condition to "select" the document is issued using the "unique key" for the collection. In this case "name".
In a nutshell you can mimic the same behavior as above by simply doing:
let results = await Promise.all([
Pokemon.findOneAndUpdate({ "name": "Pikachu" },{},{ "upsert": true, "new": true }),
Pokemon.findOneAndUpdate({ "name": "Pikachu" },{},{ "upsert": true, "new": true }),
Pokemon.findOneAndUpdate({ "name": "Pikachu" },{},{ "upsert": true, "new": true })
]);
Which would simply "create" the item on the first call where it did not already exist, or "return" the existing item. This is how "upserts" work.
[
{
"_id": "5a022f48edca148094f30e8c",
"name": "Pikachu",
"__v": 0
},
{
"_id": "5a022f48edca148094f30e8c",
"name": "Pikachu",
"__v": 0
},
{
"_id": "5a022f48edca148094f30e8c",
"name": "Pikachu",
"__v": 0
}
]
If you really did not care about "returning" each call and simply wanted to "update or create", then it's actually far more efficient to simply send one request with bulkWrite():
// Issue a "batch" in Bulk
// Issue a "batch" in Bulk: three upserts against the same name in one request.
let result = await Pokemon.bulkWrite(
Array.from({ length: 3 }, (_, i) => ({
"updateOne": {
"filter": { "name": "Pikachu" },
"update": {
"$set": { "skill": i }
},
"upsert": true
}
}))
);
So instead of awaiting the server to resolve three async calls, you only make one which either "creates" the item or "updates" with anything you use in the $set modifier when found. These are applied on every match including the first, and if you want "only on create" there is $setOnInsert to do that.
Of course this is just a "write", so it really depends on whether it is important to you to return the modified document or not. So "bulk" operations simply "write" and they do not return, but instead return information on the "batch" indicating what was "upserted" and what was "modified" as in:
{
"ok": 1,
"writeErrors": [],
"writeConcernErrors": [],
"insertedIds": [],
"nInserted": 0,
"nUpserted": 1, // <-- created 1 time
"nMatched": 2, // <-- matched and modified the two other times
"nModified": 2,
"nRemoved": 0,
"upserted": [
{
"index": 0,
"_id": "5a02328eedca148094f30f33" // <-- this is the _id created in upsert
}
],
"lastOp": {
"ts": "6485801998833680390",
"t": 23
}
}
So if you do want a "return", then a more typical case is to separate which data you want on "create" and which is needed on "update". Noting that the $setOnInsert is essentially "implied" for whatever values are in the "query" condition to select the document:
// Issue 3 pokemon as separate calls
let sequence = await Promise.all(
Array(3).fill(1).map( (e,i) =>
Pokemon.findOneAndUpdate(
{ name: "Pikachu" },
{ "$set": { "skill": i } },
{ "upsert": true, "new": true }
)
)
);
Which would show the modifications applied in "sequence" of each atomic transaction:
[
{
"_id": "5a02328fedca148094f30f38",
"name": "Pikachu",
"__v": 0,
"skill": 0
},
{
"_id": "5a02328fedca148094f30f39",
"name": "Pikachu",
"__v": 0,
"skill": 1
},
{
"_id": "5a02328fedca148094f30f38",
"name": "Pikachu",
"__v": 0,
"skill": 2
}
]
So generally it's "upserts" that you want here, and depending on your intent you either use separate calls to return each modification/creation or you issue your "writes" in a batch.
As a complete listing to demonstrate all the above:
// Complete runnable demo: three ways to "find or create" via upserts without
// ever hitting E11000, against a collection with a unique index on name.
const mongoose = require('mongoose'),
Schema = mongoose.Schema;
mongoose.Promise = global.Promise;
// debug: true logs every driver call, producing the transcript shown below.
mongoose.set('debug', true);
const uri = 'mongodb://localhost/test',
options = { useMongoClient: true };
const pokemonSchema = new Schema({
name: String,
skill: Number
},{ autoIndex: false });
// Unique index on name — the constraint the upserts must not violate.
pokemonSchema.index({ name: 1 },{ unique: true, background: false });
const Pokemon = mongoose.model('Pokemon', pokemonSchema);
// Pretty-print helper for objects and arrays.
function log(data) {
console.log(JSON.stringify(data, undefined, 2))
}
(async function() {
try {
const conn = await mongoose.connect(uri,options);
// Await index creation, otherwise we error
await Pokemon.ensureIndexes();
// Clean data for test
await Pokemon.remove();
// Issue 3 pokemon as separate calls
// Variant 1: concurrent upserts with an empty update — first call creates,
// the other two return the same existing document.
let pokemon = await Promise.all(
Array(3).fill(1).map( e =>
Pokemon.findOneAndUpdate({ name: "Pikachu" },{},{ "upsert": true, "new": true })
)
);
log(pokemon);
// Clean data again
await Pokemon.remove();
// Issue a "batch" in Bulk
// Variant 2: one bulkWrite round trip; returns batch counters, not documents.
let result = await Pokemon.bulkWrite(
Array(3).fill(1).map( (e,i) => ({
"updateOne": {
"filter": { "name": "Pikachu" },
"update": {
"$set": { "skill": i }
},
"upsert": true
}
}))
);
log(result);
let allPokemon = await Pokemon.find();
log(allPokemon);
// Clean data again
await Pokemon.remove();
// Issue 3 pokemon as separate calls
// Variant 3: concurrent upserts that also $set a field, returning each
// atomically-modified state of the single document.
let sequence = await Promise.all(
Array(3).fill(1).map( (e,i) =>
Pokemon.findOneAndUpdate(
{ name: "Pikachu" },
{ "$set": { "skill": i } },
{ "upsert": true, "new": true }
)
)
);
log(sequence);
} catch(e) {
console.error(e);
} finally {
// Always close the connection so the process can exit.
mongoose.disconnect();
}
})()
Which would produce the output ( for those too lazy to run themselves ):
Mongoose: pokemons.ensureIndex({ name: 1 }, { unique: true, background: false })
Mongoose: pokemons.remove({}, {})
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 } }, { upsert: true, new: true, remove: false, fields: {} })
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 } }, { upsert: true, new: true, remove: false, fields: {} })
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 } }, { upsert: true, new: true, remove: false, fields: {} })
[
{
"_id": "5a023461edca148094f30f82",
"name": "Pikachu",
"__v": 0
},
{
"_id": "5a023461edca148094f30f82",
"name": "Pikachu",
"__v": 0
},
{
"_id": "5a023461edca148094f30f82",
"name": "Pikachu",
"__v": 0
}
]
Mongoose: pokemons.remove({}, {})
Mongoose: pokemons.bulkWrite([ { updateOne: { filter: { name: 'Pikachu' }, update: { '$set': { skill: 0 } }, upsert: true } }, { updateOne: { filter: { name: 'Pikachu' }, update: { '$set': { skill: 1 } }, upsert: true } }, { updateOne: { filter: { name: 'Pikachu' }, update: { '$set': { skill: 2 } }, upsert: true } } ], {})
{
"ok": 1,
"writeErrors": [],
"writeConcernErrors": [],
"insertedIds": [],
"nInserted": 0,
"nUpserted": 1,
"nMatched": 2,
"nModified": 2,
"nRemoved": 0,
"upserted": [
{
"index": 0,
"_id": "5a023461edca148094f30f87"
}
],
"lastOp": {
"ts": "6485804004583407623",
"t": 23
}
}
Mongoose: pokemons.find({}, { fields: {} })
[
{
"_id": "5a023461edca148094f30f87",
"name": "Pikachu",
"skill": 2
}
]
Mongoose: pokemons.remove({}, {})
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 }, '$set': { skill: 0 } }, { upsert: true, new: true, remove: false, fields: {} })
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 }, '$set': { skill: 1 } }, { upsert: true, new: true, remove: false, fields: {} })
Mongoose: pokemons.findAndModify({ name: 'Pikachu' }, [], { '$setOnInsert': { __v: 0 }, '$set': { skill: 2 } }, { upsert: true, new: true, remove: false, fields: {} })
[
{
"_id": "5a023461edca148094f30f8b",
"name": "Pikachu",
"__v": 0,
"skill": 0
},
{
"_id": "5a023461edca148094f30f8b",
"name": "Pikachu",
"__v": 0,
"skill": 1
},
{
"_id": "5a023461edca148094f30f8b",
"name": "Pikachu",
"__v": 0,
"skill": 2
}
]
N.B The $setOnInsert is also "implied" in all "mongoose" operations for the purpose of applying the __v key. So unless you turn this off, that statement is always "merged" with whatever is issued and thus allows the {} in the first example "update" block which would be an error in the core driver due to no update modifier being applied, yet mongoose adds this one for you.
Also note that bulkWrite() does not actually reference the "schema" for the model and bypasses it. This is why there is no __v in those issued updates, and it does indeed bypass all validation as well. This is usually not an issue, but it is something you should be aware of.

Categories

Resources