I am facing an issue with a Sequelize association.
My user data is saved in the database, but the roles are not saved.
I am implementing registration with Postgres, but I'm running into the error setRole is not defined.
My code is:
Auth_User.js is the model for signing in and signing up a user; we create the association in this file:
import Sequelize from "sequelize";
import { sequelize } from "../Database/database";

const Auth_User = sequelize.define("authUsers", {
  userName: {
    type: Sequelize.STRING,
  },
  email: {
    type: Sequelize.STRING,
  },
  password: {
    type: Sequelize.STRING,
  },
});

Auth_User.associate = function(models) {
  Auth_User.belongsToMany(models.Role, { through: 'user_roles', foreignKey: 'userId', otherKey: 'roleId' });
};

export default Auth_User;
Role.js is the file where we create the role model, which holds the admin and user roles:
import Sequelize from "sequelize";
import { sequelize } from "../Database/database";

const Role = sequelize.define('roles', {
  id: {
    type: Sequelize.INTEGER,
    primaryKey: true
  },
  name: {
    type: Sequelize.STRING
  }
});

Role.associate = function(models) {
  Role.belongsToMany(models.Auth_User, { through: 'user_roles', foreignKey: 'roleId', otherKey: 'userId' })
};

export default Role;
Now signup.js. This is the file where we create the signup and where the issue occurs: user.setRole is not a function.
import Auth_User from '../Model/auth.model';
import Role from '../Model/role.model';
var bcrypt = require('bcryptjs');
var jwt = require('jsonwebtoken');
var db = require('../Database/database');
const Op = db.Sequelize.Op;

export const signup = (req, res) => {
  // Save Auth_User to Database
  console.log("Processing func -> SignUp");
  Auth_User.create({
    userName: req.body.userName,
    email: req.body.email,
    password: bcrypt.hashSync(req.body.password, 8)
  }).then(user => {
    Role.findAll({
      where: {
        name: {
          [Op.or]: req.body.roles.map(role => role.toUpperCase())
        }
      }
    }).then(roles => {
      console.log(user);
      user.setRole(roles).then(() => {
        res.send("User registered successfully!");
      });
    }).catch(err => {
      res.status(500).send("Error -> " + err);
    });
  }).catch(err => {
    res.status(500).send("Fail! Error -> " + err);
  });
};
First of all, I recommend being really careful with the model name you choose in the model definition: sequelize.define(modelName, attributes, options). Sequelize automatically pluralizes it for you, so a good convention is to always define models in the singular, although this is completely optional. This means that if you use the model name role instead of roles, the table name will automatically be set to roles by Sequelize.
The second recommendation is to avoid uppercase in this same model name, because when you have to deal with automatically created foreign keys and want to access them through object.property, you will have to use upper or lowercase depending on how you defined the name. For example, if you have a companyId in a Users table, in order to obtain it you will have to call user.CompanyId if your model name for the company table was written as Company, and user.companyId if it was written as company.
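To illustrate that point, here is a small sketch with hypothetical user and company models (the model names and attributes are just for illustration):
// Illustrative sketch only: the generated foreign-key property follows the model name's casing
const Company = sequelize.define('Company', { name: Sequelize.STRING });
const User = sequelize.define('user', { userName: Sequelize.STRING });

User.belongsTo(Company); // adds a CompanyId column to the users table
// someUser.CompanyId  -> because the model name was defined as 'Company'
// If the model had been defined as 'company', the property would be someUser.companyId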
Finally, to answer your main question, I recommend looking at the official Sequelize docs on the special methods/mixins added to instances (https://sequelize.org/master/manual/assocs.html#special-methods-mixins-added-to-instances), where you will find all the different combinations depending on the association. In your specific case, for a belongsToMany association, the special method has to be used in its plural form, so it should be user.setRoles(roles). This doesn't apply, however, if you prefer the add method, which can be used either as user.addRole(role) for a single role or user.addRoles(roles) for several.
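Applied to the signup code in the question, a minimal sketch of the change (only the role-assignment part is shown; everything else stays the same):
Role.findAll({
  where: {
    name: {
      [Op.or]: req.body.roles.map(role => role.toUpperCase())
    }
  }
}).then(roles => {
  // belongsToMany generates plural mixins: setRoles / addRoles / getRoles
  return user.setRoles(roles);
}).then(() => {
  res.send("User registered successfully!");
});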
Related
I'm working on a web application for my company to view a database of customers and their data using MongoDB, Mongoose, and Express. Our company resells used copiers/printers and also provides maintenance contracts for machines. I want to save each customer as a document, with machines as separate linked documents.
I have models, controllers, and routes set up for customers and machines. I am getting the following error when trying to delete a machine from its customer:
Customer.findByIdAndUpdate is not a function
TypeError: Customer.findByIdAndUpdate is not a function at module.exports.deleteMachine (C:\controllers\machines.js:21:20) at C:\utils\catchAsync.js:3:9 at Layer.handle [as handle_request] (C:\node_modules\express\lib\router\layer.js:95:5) at next (C:\node_modules\express\lib\router\route.js:144:13) at module.exports.getCustomer (C:\middleware.js:15:5) at processTicksAndRejections (node:internal/process/task_queues:96:5)
My code is as follows:
Controller for Machines:
const Customer = require('../models/customer');
const Machine = require('../models/machine');

module.exports.deleteMachine = async (req, res) => {
  const { id, machineId } = req.params;
  await Customer.findByIdAndUpdate(id, { $pull: { machines: machineId } });
  await Machine.findByIdAndDelete(machineId);
  req.flash('success', 'Machine has been deleted');
  res.redirect(`/customers/${id}`);
};
Route for Machines:
router.delete('/:machineId', getCustomer, catchAsync(machines.deleteMachine));
the "getCustomer" middleware is as follows - its only purpose is to ensure a valid customer is being requested and to set the "foundCustomer" to make my life easier elsewhere. I don't think it is the issue, but I'm including it just for clarity:
module.exports.getCustomer = async (req, res, next) => {
  const { id } = req.params;
  const customer = await Customer.findById(id).populate({ path: 'machines' });
  if (!customer) {
    req.flash('error', 'Sorry, that customer cannot be found!');
    return res.redirect('/customers');
  }
  res.locals.foundCustomer = customer;
  next();
};
The relevant routes have been set as follows in my app.js:
const customerRoutes = require('./routes/customers');
const machineRoutes = require('./routes/machines');
app.use('/customers', customerRoutes);
app.use('/customers/:id/machines', machineRoutes);
I haven't run into any issues with other machine routes, so I'm not sure why this one is throwing an error. This application is actually the second version that I've made, and the first version uses the exact same code, with no issue. So I'm super stumped.
Any help is greatly appreciated!
Customer Model -
const customerSchema = new Schema({
  customer: String,
  customerID: String,
  category: {
    type: String,
    enum: ['contracted', 'billable']
  },
  contacts: [contactSchema],
  address: String,
  city: String,
  state: String,
  zip: String,
  county: String,
  machines: [
    {
      type: Schema.Types.ObjectId,
      ref: 'Machine'
    }
  ],
  notes: [noteSchema]
});
I'm a dummy. I exported the Customer model as part of an object of exports like this:
const Customer = mongoose.model('Customer', customerSchema);

module.exports = {
  Customer: Customer,
  Note: Note,
  Contact: Contact
};
When requiring the model in my Machine controller I had it formatted as:
const Customer = require('../models/customer');
To get it working correctly I needed to require it like this:
const { Customer } = require('../models/customer');
After making that change everything is working correctly, and I can move on with my life/application.
Given a user model:
import { model, Schema } from 'mongoose'

export interface User {
  email: string
}

const userSchema = new Schema<User>(
  {
    email: {
      type: String,
      required: true,
    },
  },
)

export const UserModel = model<User>('User', userSchema)
I'm trying to save it as so:
// inside an async function
const newUser: HydratedDocument<User> = new UserModel({
  email: 'aaa#aaa.com',
})
console.log(newUser)
await newUser.save()
Which results in newUser.save is not a function. What am I missing?
My stupid mistake: I was calling everything on the frontend. The solution is as simple as moving the relevant DB calls to the endpoint.
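A minimal sketch of what that looks like, assuming an Express backend (the route path and request body shape here are illustrative, as is the import path for the model defined above):
// Server-side endpoint (illustrative): the Mongoose call runs on the backend,
// and the frontend only sends the data over HTTP.
import express from 'express'
import { UserModel } from './user.model' // assumed path to the model above

const router = express.Router()

router.post('/users', async (req, res) => {
  const newUser = new UserModel({ email: req.body.email })
  await newUser.save()
  res.status(201).json(newUser)
})

export default router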
You also need to define a model; change your schema definition as follows.
First, create the model from the schema:
var UserModel = mongoose.model('User', User);
Then create an object from the User model:
var user = new UserModel(req.body)
Then call:
user.save(function(){});
Check the documentation: http://mongoosejs.com/docs/api.html#model_Model-save
On the server, I am populating user data, and when I print it to the console everything works fine, but I am not able to access the data on the client or even in the GraphQL Playground.
This is my schema:
const { model, Schema } = require("mongoose");

const postSchema = new Schema({
  body: String,
  user: {
    type: Schema.Types.ObjectId,
    ref: "User",
  },
});

module.exports = model("Post", postSchema);

const userSchema = new Schema({
  username: String,
});

module.exports = model("User", userSchema);
const { gql } = require("apollo-server");

module.exports = gql`
  type Post {
    id: ID!
    body: String!
    user: [User]!
  }
  type User {
    id: ID!
    username: String!
  }
  type Query {
    getPosts: [Post]!
    getPost(postId: ID!): Post!
  }
`;
Query: {
  async getPosts() {
    try {
      const posts = await Post.find()
        .populate("user");
      console.log("posts: ", posts[0]);
      // This works and returns the populated user with the username
      return posts;
    } catch (err) {
      throw new Error(err);
    }
  },
}
But on the client or even in Playground, I can't access the populated data.
query getPosts {
  getPosts {
    body
    user {
      username
    }
  }
}
My question is how to access the data from the client.
Thanks for your help.
You are using this feature the wrong way. You should define an object in your resolvers named after your model, and that object should contain a method that returns the related user based on the parent value.
Here is the full document from the Apollo Server docs on how to use this feature.
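A minimal sketch of that idea, assuming the Post and User models shown in the question (the import paths are assumptions; note that the schema above declares user: [User]!, so the field type would also need to match what the resolver returns):
// resolvers.js (sketch): a field resolver on the Post type returns the related user
const Post = require("./models/Post"); // assumed paths
const User = require("./models/User");

module.exports = {
  Post: {
    // `parent` is the Post document returned by getPosts;
    // parent.user holds the referenced user's ObjectId, so no populate() is needed here
    user: (parent) => User.findById(parent.user),
  },
  Query: {
    getPosts: () => Post.find(),
  },
};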
Use lean() like this (lean() makes the query return plain JavaScript objects instead of Mongoose documents):
const posts = await Post.find().populate("user").lean();
I'm building an app where a user logs in and can create a grocery list on their account (there are more things they can do like create recipes, but this is the example I want to use). Right now I have it so everybody who logs in sees the same list. But I want each user to be able to log in and view their own grocery list that they made. I'm assuming the logic is literally like logging into a social media site and viewing YOUR profile, not somebody else's.
I'm using mongoDB/mongoose and I just read about the populate method as well as referencing other schemas in your current schema. Here is my schema for the list:
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

// Create item schema
const GroceryListItemSchema = new Schema({
  item: {
    type: String,
    required: [true, 'Item field is required']
  },
  userId: {
    type: mongoose.Schema.Types.ObjectId, ref: "user",
  }
});

// Create an Item model
const GroceryListItem = mongoose.model('groceryListItem', GroceryListItemSchema);

module.exports = GroceryListItem;
And here is the post request to add a list item:
// POST request for shopping list
router.post("/list", checkToken, (req, res, next) => {
  // Add an item to the database
  const groceryListItem = new GroceryListItem({
    item: req.body.item,
    userId: ???
  })
  groceryListItem.save()
    .then((groceryListItem) => {
      res.send(groceryListItem);
    })
    .catch(next);
});
Here is my userModel - not sure if this is necessary to show:
const mongoose = require("mongoose");
const Schema = mongoose.Schema;

const UserSchema = new Schema({
  username: {
    type: String,
    required: true,
  },
  email: {
    type: String,
    required: true,
  },
  password: {
    type: String,
    required: true,
  },
  password2: {
    type: String,
    required: true,
  },
});

const User = mongoose.model("users", UserSchema);

module.exports = User;
(In case anyone is wondering why the model is called "users" -- that's what I initially called it by accident, and when I changed the name to "user" it errored out... so I changed it back.)
I am not sure how to add the userId when making an instance of the groceryListItem. In the mongoose docs (https://mongoosejs.com/docs/populate.html#saving-refs), they use the example of a Story and Person Schema. They reference each other, and then they create an instance of Person, calling it author. Then they grab the _id from author and reference it in their instance of Story, called story1. So that makes sense to me. But the only way they can do that is because author and story1 are located in the same file.
So it seems like what I should do is grab the user _id by saying userId: users._id. But my new User instance is in my user routes. And I'd rather not combine the two. Because then I'd have another list to combine as well so that would be my user routes, recipe routes, and shopping list routes all in one file and that would be extremely messy.
Anyone have any idea how I can make this work? It seems so simple but for some reason I cannot figure this out.
Thank you!!
EDIT - frontend API call:
handleSubmitItem = (e) => {
  e.preventDefault();
  const newItem = {
    item: this.state.userInput,
  };
  authAxios
    .post(`http://localhost:4000/list/${userId}`, newItem)
    .then((res) => {
      this.setState({ items: [...this.state.items, newItem] });
      newItem._id = res.data._id;
    })
    .catch((err) => console.log(err));
  this.setState({ userInput: "" });
};
Here you can simply pass the user ID in the POST request params. The POST URL in the frontend should look like this: localhost:4000/list/${userId}
You can then get the user ID at the Express backend like this:
router.post("/list/:id", checkToken, (req, res, next) => {
// Add an item to the database
const groceryListItem = new GroceryListItem({
item: req.body.item,
userId: req.params.id
})
groceryListItem.save()
.then((groceryListItem) => {
res.send(groceryListItem);
}).catch(next);
});
I'm implementing a GraphQL API over ArangoDB (with arangojs) and I want to know how best to implement dataloader (or something similar) for this very basic use case.
I have two resolvers with the DB queries shown below (both of them work): the first fetches Persons, the second fetches a list of Record objects associated with a given Person (one-to-many). The association is made using ArangoDB's edge collections.
import { Database, aql } from 'arangojs'
import pick from 'lodash/pick'

const db = new Database('http://127.0.0.1:8529')
db.useBasicAuth('root', '')
db.useDatabase('_system')

// id is the auto-generated userId, which is `_key` in Arango
const fetchPerson = id => async (resolve, reject) => {
  try {
    const cursor = await db.query(aql`RETURN DOCUMENT("PersonTable", ${String(id)})`)
    // Unwrap the results from the cursor object
    const result = await cursor.next()
    return resolve(pick(result, ['_key', 'firstName', 'lastName']))
  } catch (err) {
    return reject(err)
  }
}

// id is the auto-generated userId (`_key` in Arango) who is associated with the records via the Person_HasMany_Records edge collection
const fetchRecords = id => async (resolve, reject) => {
  try {
    const edgeCollection = await db.collection('Person_HasMany_Records')
    // Query simply says: `get all connected nodes 1 step outward from origin node, in edgeCollection`
    const cursor = await db.query(aql`
      FOR record IN 1..1
      OUTBOUND DOCUMENT("PersonTable", ${String(id)})
      ${edgeCollection}
      RETURN record`)
    return resolve(cursor.map(each =>
      pick(each, ['_key', 'intro', 'title', 'misc']))
    )
  } catch (err) {
    return reject(err)
  }
}

export default {
  Query: {
    getPerson: (_, { id }) => new Promise(fetchPerson(id)),
    getRecords: (_, { ownerId }) => new Promise(fetchRecords(ownerId)),
  }
}
Now, if I want to fetch the Person data with the Records as nested data, in a single request, the query would be this:
aql`
  LET person = DOCUMENT("PersonTable", ${String(id)})
  LET records = (
    FOR record IN 1..1
    OUTBOUND person
    ${edgeCollection}
    RETURN record
  )
  RETURN MERGE(person, { records: records })`
So how should I update my API to employ batch requests / caching? Can I somehow run fetchRecords(id) inside of fetchPerson(id) but only when fetchPerson(id) is invoked with the records property included?
Here is the setup file; notice I'm using graphql-tools, because I took this from a tutorial somewhere.
import http from 'http'
import db from './database'
import schema from './schema'
import resolvers from './resolvers'
import express from 'express'
import bodyParser from 'body-parser'
import { graphqlExpress, graphiqlExpress } from 'apollo-server-express'
import { makeExecutableSchema } from 'graphql-tools'

const app = express()

// bodyParser is needed just for POST.
app.use('/graphql', bodyParser.json(), graphqlExpress({
  schema: makeExecutableSchema({ typeDefs: schema, resolvers })
}))
app.get('/graphiql', graphiqlExpress({ endpointURL: '/graphql' })) // if you want GraphiQL enabled
app.listen(3000)
And here's the schema.
export default `
  type Person {
    _key: String!
    firstName: String!
    lastName: String!
  }
  type Records {
    _key: String!
    intro: String!
    title: String!
    misc: String!
  }
  type Query {
    getPerson(id: Int!): Person
    getRecords(ownerId: Int!): [Record]!
  }
  type Schema {
    query: Query
  }
`
So, the real benefit of dataloader is that it stops you from making n+1 queries. For example, if Person had a records field in your schema and you asked for the first 10 people's records, a naive GraphQL schema would fire 11 requests: 1 for the first 10 people, and then one for each person's records.
With dataloader implemented, you cut that down to two requests: one for the first 10 people, and then one for all of the records of the first ten people.
With your schema above, it doesn't seem that you can benefit in any way from dataloader, since there's no possibility of n+1 queries. The only benefit you might get is caching if you make multiple requests for the same person or records within a single request (which again, isn't possible based on your schema design unless you are using batched queries).
Let's say you want the caching though. Then you could do something like this:
// loaders.js
import DataLoader from 'dataloader';

// The callback functions take a list of keys and return a list of values to
// hydrate those keys, in order, with `null` for any value that cannot be hydrated
export default {
  personLoader: new DataLoader(loadBatchedPersons),
  personRecordsLoader: new DataLoader(loadBatchedPersonRecords),
};
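For reference, a rough sketch of what those two batch functions might look like against the ArangoDB setup from the question (this is an assumption, not part of the answer: it reuses the db and aql exports and the PersonTable / Person_HasMany_Records names from the question's code):
// Sketch only: batch-load persons by `_key` in a single AQL query.
const loadBatchedPersons = async keys => {
  const cursor = await db.query(aql`
    FOR key IN ${keys.map(String)}
      RETURN DOCUMENT("PersonTable", key)`)
  // DOCUMENT() yields null for missing keys, which is what DataLoader expects
  return cursor.all()
}

// Sketch only: batch-load each person's records, returned in the same order as `keys`.
const loadBatchedPersonRecords = async keys => {
  const cursor = await db.query(aql`
    FOR key IN ${keys.map(String)}
      LET records = (
        FOR record IN 1..1
          OUTBOUND DOCUMENT("PersonTable", key)
          Person_HasMany_Records
          RETURN record
      )
      RETURN records`)
  return cursor.all()
}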
You then want to attach the loaders to your context for easy sharing. Modified example from Apollo docs:
// app.js
import loaders from './loaders';

app.use(
  '/graphql',
  bodyParser.json(),
  graphqlExpress(req => {
    return {
      schema: myGraphQLSchema,
      context: {
        loaders,
      },
    };
  }),
);
Then, you can use them from the context in your resolvers:
// ViewerType.js:
// Some parent type, such as `viewer`, often
{
  person: {
    type: PersonType,
    // `viewer.personId` is an assumed key field on the parent object
    resolve: async (viewer, args, context, info) => context.loaders.personLoader.load(viewer.personId),
  },
  records: {
    type: new GraphQLList(RecordType), // This could also be a connection
    resolve: async (viewer, args, context, info) => context.loaders.personRecordsLoader.load(viewer.personId),
  },
}
I guess I was confused about the capability of dataloader. Serving nested data was really the stumbling block for me.
This is the missing code. The export from resolvers.js needed a Person property:
export default {
  Person: {
    records: (person) => new Promise(fetchRecords(person._key)),
  },
  Query: {
    getPerson: (_, { id }) => new Promise(fetchPerson(id)),
    getRecords: (_, { ownerId }) => new Promise(fetchRecords(ownerId)),
  },
}
And the Person type in the schema needed a records property.
type Person {
  _key: String!
  firstName: String!
  lastName: String!
  records: [Records]!
}
It seems these features are provided by Apollo's graphql-tools.