Executing mysql queries sequentially nodejs - javascript

I am pretty new to nodejs and am using it to construct an api with a mysql database and have run into an issue where I am unable to execute mysql queries sequentially.
The database structure is that there are three tables. Table a in a one to many relation with table b, which is in a one to many relation with table c.
I need a GET endpoint where aid is given and it returns a result with an array of items of b with the array of items of c nested inside it.
Sample Result:
{
logcode: "",
logmessage: "",
bitems: [{
bitemid: 1
citems: [{
citemid: 1
} {
citemid: 2
}]
}{
bitemid: 2
citems: [{
citemid: 3
}]
}]
}
Currently I am attempting to do this by first executing a query where I retrieve all the values of type b that correspond to the received key of entity a and then running a foreach loop over the results and extracting the bitem ids and then running another query within the foreach loop to get all items of table c with that specific foreign key.
async function (req, res) {
let functionname = 'getAllItems'
const conLocalPool = db.conLocalPool.promise();
var data = {}
const atoken = req.get('token');
var str = ``;
str = `call ${functionname}('${atoken}')`;
console.log(str);
try {
const [rows, fields] = await conLocalPool.query(str)
data = rows[0][0]
if (data.logcode === 20100) {
data.bitems = rows[1];
data.bitems.forEach(async (bitem) => {
var stmt = `Select * from \`table-c\` where bitemid=${bitem.id}`
try {
const [citemrows, citemfields] = await conLocalPool.query(stmt);
console.log(citemrows[1])
bitem.citems = citemrows[1];
return true;
}
catch (err) {
console.log(err);
return false;
}
})
}
res.status(200).send(data);
return true;
}
catch (err) {
res.status(500).send({
error: err.message
});
return false;
}
}
Using this function, I am able to get a response which contains all the bitems related to the aitemtoken but without the citems related to each individual bitem.
I would like some help on how to execute the first query and then the later queries based on the response that it retrieves.

According to your last comment, I'm sure that you can achieve it by doing JOIN. There are a few ways to do it but consider this example below:
SELECT * FROM owner o
JOIN dogs d ON o.id=d.owner_id
JOIN dog_toys t ON d.id=t.dog_id
WHERE o.id=1;
Assuming that you have 3 tables (owner, dogs & dog_toys) and each of the table have a relation of owner.id=dogs.owner_id and dogs.id=dog_toys.dog_id, you can simply JOIN all three of them in one query and modify the returned results in SELECT.
I've created a sample fiddle here : https://www.db-fiddle.com/f/ukBgL4M3NzkyTDC6nMGkJn/1

Related

Override Mongoose save method to retry on `duplicate key error`

My Mongoose schema uses a custom _id value and the code I inherited does something like this
const sampleSchema = new mongoose.Schema({
_id: String,
key: String,
});
sampleSchema.statics.generateId = async function() {
let id;
do {
id = randomStringGenerator.generate({length: 8, charset: 'hex', capitalization: 'uppercase'});
} while (await this.exists({_id: id}));
return id;
};
let SampleModel = mongoose.model('Sample', sampleSchema);
A simple usage looks like this:
let mySample = new SampleModel({_id: await SampleModel.generateId(), key: 'a' });
await mySample.save();
There are at least three problems with this:
Every save will require at least two trips to the database, one to test for a unique id and one to save the document.
For this to work, it is necessary to manually call generateId() before each save. An ideal solution would handle that for me, like Mongoose does with ids of type ObjectId.
Most significantly, there is a potential race condition that will result in duplicate key error. Consider two clients running this code. Both coincidentally generate the same id at the same time, both look in the database and find the id absent, both try to write the record to the database. The second will fail.
An ideal solution would, on save, generate an id, save it to the database and on duplicate key error, generate a new id and retry. Do this in a loop until the document is stored successfully.
The trouble is, I don't know how to get Mongoose to let me do this.
Here's what I tried: Based on this SO Question, I found a rather old sample (using a very old mongoose version) of overriding the save function to accomplish something similar and based this attempt off it.
// First, change generateId() to force a collision
let ids = ['a', 'a', 'a', 'b'];
let index = 0;
let generateId = function() {
return ids[index++];
};
// Configure middleware to generate the id before a save
sampleSchema.pre('validate', function(next) {
if (this.isNew)
this._id = generateId();
next();
});
// Now override the save function
SampleModel.prototype.save_original = SampleModel.prototype.save;
SampleModel.prototype.save = function(options, callback) {
let self = this;
let retryOnDuplicate = function(err, savedDoc) {
if (err) {
if (err.code === 11000 && err.name === 'MongoError') {
self.save(options, retryOnDuplicate);
return;
}
}
if (callback) {
callback(err, savedDoc);
}
};
return self.save_original(options, retryOnDuplicate);
}
This gets me close but I'm leaking a promise and I'm not sure where.
let sampleA = new SampleModel({key: 'a'});
let sampleADoc = await sampleA.save();
console.log('sampleADoc', sampleADoc); // prints undefined, but should print the document
let sampleB = new SampleModel({key: 'b'});
let sampleBDoc = await sampleB.save();
console.log('sampleBDoc', sampleBDoc); // prints undefined, but should print the document
let all = await SampleModel.find();
console.log('all', all); // prints `[]`, but should be an array of two documents
Output
sampleADoc undefined
sampleBDoc undefined
all []
The documents eventually get written to the database, but not before the console.log calls are made.
Where am I leaking a promise? Is there an easier way to do this that addresses the three problems I outlined?
Edit 1:
Mongoose version: 5.11.15
I fixed the problem by changing the save override. The full solution looks like this:
const sampleSchema = new mongoose.Schema({
_id: String,
color: String,
});
let generateId = function() {
return randomStringGenerator.generate({length: 8, charset: 'hex', capitalization: 'uppercase'});
};
sampleSchema.pre('validate', function() {
if (this.isNew)
this._id = generateId();
});
let SampleModel = mongoose.model('Sample', sampleSchema);
SampleModel.prototype.save_original = SampleModel.prototype.save;
SampleModel.prototype.save = function(options, callback) {
let self = this;
let isDupKeyError = (error, field) => {
// Determine whether the error is a duplicate key error on the given field
return error?.code === 11000 && error?.name === 'MongoError' && error?.keyValue[field];
}
let saveWithRetries = (options, callback) => {
// save() returns undefined if used with callback or a Promise otherwise.
// https://mongoosejs.com/docs/api/document.html#document_Document-save
let promise = self.save_original(options, callback);
if (promise) {
return promise.catch((error) => {
if (isDupKeyError(error, '_id')) {
return saveWithRetries(options, callback);
}
throw error;
});
}
};
let retryCallback;
if (callback) {
retryCallback = (error, saved, rows) => {
if (isDupKeyError(error, '_id')) {
saveWithRetries(options, retryCallback);
} else {
callback(error, saved, rows);
}
}
}
return saveWithRetries(options, retryCallback);
}
This will generate an _id repeatedly until a successful save is called and addresses the three problems outlined in the original question:
The minimum trips to the database has been reduced from two to one. Of course, if there are collisions, more trips will occur but that's the exceptional case.
This implementation takes care of generating the id itself with no manual step to take before saving. This reduces complexity and removes the required knowledge of prerequisites for saving that are present in the original method.
The race condition has been addressed. It won't matter if two clients attempt to use the same key. One will succeed and the other will generate a new key and save again.
To improve this:
There ought to be a maximum number of save attempts for a single document followed by failure. In this case, you've perhaps used up all the available keys in whatever domain you're using.
The unique field may not be named _id or you might have multiple fields that require a unique generated value. The embedded helper function isDupKeyError() could be updated to look for multiple keys. Then on error you could add logic to regenerate just the failed key.

Snowflake only executes first SQL command in stored procedure

I was trying to create a procedure that copies the content of my table into S3 partitioned by 2 different combinations. For that I did the following:
$$
var cmd_partition1 = `...`
var cmd_partition2 = `...`
var store_data_partitioned_by_1_command = snowflake.createStatement({ sqlText: cmd_partition1 })
var store_data_partitioned_by_2_command = snowflake.createStatement({ sqlText: cmd_partition2 })
try {
store_data_partitioned_by_1_command.execute()
store_data_partitioned_by_2_command.execute()
return 'Succeeded.'
}
catch (err) {
return "Failed: " + err
}
$$;
However, each time I execute the procedure the partitioning is only performed for the 1st combination, while the 2nd one is ignored.
Do you know why this is happening and how can I solve it?
I tested each one of the cmd_partition (1 and 2) in the Snowflake GUI and both of them work as expected.
create table test_sp(id int);
-- test sql is good
insert into test_sp values (1);
insert into test_sp values (2);
-- clean up
truncate table test_sp;
create or replace procedure double_exec()
returns varchar
language javascript as
$$
var cmd_partition1 = `insert into test_sp values (1)`
var cmd_partition2 = `insert into test_sp values (2)`
var store_data_partitioned_by_1_command = snowflake.createStatement({ sqlText: cmd_partition1 })
var store_data_partitioned_by_2_command = snowflake.createStatement({ sqlText: cmd_partition2 })
try {
store_data_partitioned_by_1_command.execute()
store_data_partitioned_by_2_command.execute()
return 'Succeeded.'
}
catch (err) {
return "Failed: " + err
}
$$;
and now to run
call double_exec();
DOUBLE_EXEC
Succeeded.
so lets check the steps ran
select * from test_sp;
ID
1
2
so the concept of having to executions in a row is valid.
which makes it something about the SQL itself, and not the stored procedure.

Unable to add object fetched from mongoDB to normal javascript array

I am trying to create an add to cart button which fetches the data from product database using the id of specific product which I selected. I am trying to push the object found using the same Id into a normal javascript array and then to display it using ejs methods. While I was tring I found I am unable to push the data in object form.
Summary:
On 7th line I have declared an array and in that array I want to store some objects which I have fetched frome a db model.
On 15th line I am trying to push the object form into my array so that I could iterate through the objects to display them on my page using ejs. But I am unable to do that.
screenshots:
Here's the final result I'm getting even after trying to push objects in array:
empty array logged
Here are the objects I'm trying to push:
Objects
Code:
app.get("/cart", (req, res) => {
if (req.isAuthenticated()) {
const findcartdata = req.user.username;
userData.findOne({email: findcartdata}, (err, BookId) => {
// console.log(BookId.cartItemId);
const idArray = BookId.cartItemId;
var bookArray = [];
idArray.forEach((data) => {
productData.findOne({_id: data}, (err, foundBookData) =>{
// console.log(foundBookData);
if(err){
console.log(err);
}
else{
bookArray.push(foundBookData);
}
})
});
console.log(bookArray);
// res.render("cart", {
// cartBookArray: BookId.cartItemId
// })
});
} else {
res.redirect("/login");
}
})
In above code i found the user's email using passport authentication user method and using that email I wanted to add the products in a different javascript array (which I am goint to pass to my ejs file of cart and then iterate it on list) using those array of Id which I got from another model called userData. The problem is I am able to find userData of each Id but unable to store them as an array of objects.
Looks like a timing issue, your code completes before the database downloads the objects and pushes them to your array.
This should fix your issue:
// ...
const idArray = BookId.cartItemId;
var bookArray = [];
for (const data of idArray) {
const foundBookData = await productData.findOne({_id: data}).catch(console.error);
if (!foundBookData) continue;
bookArray.push(foundBookData);
}
console.log(bookArray);
// ...
By the way, make sure to make the whole function asynchronous as well, which would be done by changing this line:
userData.findOne({email: findcartdata}, async (err, BookId) => { // ...

How to order contacts by last message date with Sequelize?

I'm trying to create a chat app in nodejs with Sequelize for my db.
Everything works great but when i try to show all user friends ordered by the last message date i have a bug.
To get the users with there messages it works with associations but ordering doesn't.
Here is my function to get messages:
const getFriends = (id, limit) => {
return new Promise(async (resolve, reject) => {
try {
const user = await Follows.findAll({
where: {
[Op.or]: {
follower_id: id,
followed_id: id
},
},
include: Messages
order: [[Messages, 'createdAt', 'DESC']],
limit
})
resolve(user)
} catch (e) {
reject(e)
}
})
}
So Seqeulize does the query but not in the way that I want to.
Here is the query generated by Sequelize:
SELECT `follows`.*,
`messages`.*
FROM (
SELECT `follows`.* FROM `follows`
WHERE (
`follows`.`follower_id` = 3
OR
`follows`.`followed_id` = 3
)
AND `follows`.`reciprocal` = true
LIMIT 1
)
LEFT OUTER JOIN `messages` ON `follows`.`id` = `messages`.`pairId`
ORDER BY `messages`.`createdAt` DESC;
As you see here the ORDER BY messages.createdAt DESC; happens at the end, but after the FROM you can see he does a query to the table i'm already querying.
Which is not LOGIC !!
I would like to my sql query to look like that:
SELECT `follows`.*,
`messages`.*
FROM `follows`
LEFT OUTER JOIN `messages` ON `follows`.`id` = `messages`.`pairId`
WHERE (
`follows`.`follower_id` = 3
OR
`follows`.`followed_id` = 3
)
AND `follows`.`reciprocal` = true
ORDER BY `messages`.`createdAt` DESC
LIMIT 1
That means that I want my where and my limit to be in the parent query and not in the FROM query.
I really don't know how to do this with Sequelize and didn't find someone having same issue on the internet.
I would be really happy to have some help!
Actualy I found the answer.
I just needed to add subQuery:false and it worked as I wanted !

Is there a way to access data from another table during table.read - Azure Mobile App

I am trying to get data from another database before reading data in the table. However, I can't seem to find a way to access it properly.
The best I've got so far is based on some other examples both on Microsoft's documentation and on StackOverflow but they all seem to fail.
table.read(function (context) {
var results = context.tables("table2").read();
var text = results[0].column;
context.query.where({ columnName: text });
return context.execute();
});
I get an error when doing this saying that column doesn't exist.
As per your description, if I do not misunderstand, you want to query table2 in table1 operations in EasyTables scripts.
we can leverage "use()" to custom middleware to specify middleware to be executed for every request against the table as the description on the document of azure-mobile-apps sdk at
E.G.
var queries = require('azure-mobile-apps/src/query');
var insertMiddleware = function(req,res,next){
var table = req.azureMobile.tables('table2'),
query = queries.create('table2')
.where({ TestProperty : req.body.testproperty });
table.read(query).then(function(results) {
if(results){
req.someStoreData = somehander(results); //some hander operations here to get what you want to store and will use in next step
next();
}else{
res.send("no data");
}
});
};
table.insert.use(insertMiddleware, table.operation);
table.insert(function (context) {
console.log(context.req.someStoreData);
return context.execute();
});
More example:
async function filterByAllowedDomain(context) {
var domains = await context.tables('domains')
.where({ allowed: true })
.read();
var categories = await context.tables('categories')
.where(function (ids) {
return this.domainId in ids;
}, domains.map(d => d.id))
.read();
context.query.where(function (ids) {
return this.categoryId in ids;
}, categories.map(c => c.id));
return context.execute(); }
The tables module in azure-mobile-apps-node sdk contains functionality for adding tables to an Azure Mobile App. It returns a router that can be attached to an express app with some additional functions for registering tables. Which actually leverage Azure SQL (SQL Server database service on Azure).
Hope it helps.

Categories

Resources