I've been using prepared statements for SQL Server queries, but I don't think I'm using them properly. I want to be able to reuse the queries for different cases that require different parameters, but I'm not sure how to go about it, so I've been creating a new function for every case.
I want to do something similar to this in Java:
http://tutorials.jenkov.com/jdbc/preparedstatement.html
I'm using
https://www.npmjs.com/package/mssql
and the documentation doesn't really show prepared statements being reused:
https://www.npmjs.com/package/mssql#prepared-statement
Instead of SELECT *, I want to be able to choose which parameters I use without creating a new function every time.
async function SelectByStatusId(statusId) {
  const ps = new sql.PreparedStatement(connectionPool);
  ps.input('statusId', sql.Int);
  const statement = await ps.prepare(
    `SELECT * FROM table1 WHERE statusId = @statusId`
  );
  const result = await statement.execute({
    statusId: statusId
  });
  await statement.unprepare();
  return result.recordset;
}
For update functions I have to define the parameter types and match the body values to those parameters, so I'm not sure how I can reuse statements when updating or inserting.
async function UpdateTable1(body) {
  const ps = new sql.PreparedStatement(pools.poolDatalupa);
  ps.input('id', sql.Int);
  ps.input('name', sql.NVarChar);
  ps.input('number', sql.Float);
  const statement = await ps.prepare(
    `UPDATE table1
        SET name = @name,
            number = @number
      WHERE id = @id`
  );
  const result = await statement.execute({
    id: body.id,
    name: body.name,
    number: body.number
  });
  await statement.unprepare();
  return result;
}
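One way to avoid writing a new function per query is a single generic helper that takes the SQL text, a map of parameter names to sql types, and the values to bind. This is a rough sketch, not from the mssql docs; the name runPrepared and the parameter-spec shape are my own.

// Sketch of a reusable helper, assuming the same `sql` import and pools as above.
// paramTypes maps parameter names to mssql type objects; values maps the same
// names to the values to bind for this execution.
async function runPrepared(pool, query, paramTypes, values) {
  const ps = new sql.PreparedStatement(pool);
  for (const [name, type] of Object.entries(paramTypes)) {
    ps.input(name, type);
  }
  await ps.prepare(query);
  try {
    return await ps.execute(values); // result.recordset holds the rows for SELECTs
  } finally {
    await ps.unprepare(); // always release the prepared statement
  }
}

// Hypothetical usage covering both cases from the question:
const byStatus = await runPrepared(
  connectionPool,
  'SELECT * FROM table1 WHERE statusId = @statusId',
  { statusId: sql.Int },
  { statusId: 5 }
);

const updated = await runPrepared(
  connectionPool,
  'UPDATE table1 SET name = @name, number = @number WHERE id = @id',
  { id: sql.Int, name: sql.NVarChar, number: sql.Float },
  { id: 1, name: 'new name', number: 1.5 }
);

Like the original functions, this sketch prepares and unprepares on every call; whether it's worth keeping a statement prepared between calls depends on how often each query runs.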
My Mongoose schema uses a custom _id value, and the code I inherited does something like this:
const sampleSchema = new mongoose.Schema({
  _id: String,
  key: String,
});

sampleSchema.statics.generateId = async function() {
  let id;
  do {
    id = randomStringGenerator.generate({length: 8, charset: 'hex', capitalization: 'uppercase'});
  } while (await this.exists({_id: id}));
  return id;
};

let SampleModel = mongoose.model('Sample', sampleSchema);
A simple usage looks like this:
let mySample = new SampleModel({_id: await SampleModel.generateId(), key: 'a' });
await mySample.save();
There are at least three problems with this:
Every save will require at least two trips to the database, one to test for a unique id and one to save the document.
For this to work, it is necessary to manually call generateId() before each save. An ideal solution would handle that for me, like Mongoose does with ids of type ObjectId.
Most significantly, there is a potential race condition that will result in duplicate key error. Consider two clients running this code. Both coincidentally generate the same id at the same time, both look in the database and find the id absent, both try to write the record to the database. The second will fail.
An ideal solution would, on save, generate an id, save it to the database and on duplicate key error, generate a new id and retry. Do this in a loop until the document is stored successfully.
The trouble is, I don't know how to get Mongoose to let me do this.
Here's what I tried: based on this SO question, I found a rather old sample (using a very old Mongoose version) that overrides the save function to accomplish something similar, and I based this attempt on it.
// First, change generateId() to force a collision
let ids = ['a', 'a', 'a', 'b'];
let index = 0;
let generateId = function() {
  return ids[index++];
};

// Configure middleware to generate the id before a save
sampleSchema.pre('validate', function(next) {
  if (this.isNew)
    this._id = generateId();
  next();
});

// Now override the save function
SampleModel.prototype.save_original = SampleModel.prototype.save;
SampleModel.prototype.save = function(options, callback) {
  let self = this;
  let retryOnDuplicate = function(err, savedDoc) {
    if (err) {
      if (err.code === 11000 && err.name === 'MongoError') {
        self.save(options, retryOnDuplicate);
        return;
      }
    }
    if (callback) {
      callback(err, savedDoc);
    }
  };
  return self.save_original(options, retryOnDuplicate);
}
This gets me close but I'm leaking a promise and I'm not sure where.
let sampleA = new SampleModel({key: 'a'});
let sampleADoc = await sampleA.save();
console.log('sampleADoc', sampleADoc); // prints undefined, but should print the document
let sampleB = new SampleModel({key: 'b'});
let sampleBDoc = await sampleB.save();
console.log('sampleBDoc', sampleBDoc); // prints undefined, but should print the document
let all = await SampleModel.find();
console.log('all', all); // prints `[]`, but should be an array of two documents
Output
sampleADoc undefined
sampleBDoc undefined
all []
The documents eventually get written to the database, but not before the console.log calls are made.
Where am I leaking a promise? Is there an easier way to do this that addresses the three problems I outlined?
Edit 1:
Mongoose version: 5.11.15
I fixed the problem by changing the save override. The full solution looks like this:
const sampleSchema = new mongoose.Schema({
  _id: String,
  color: String,
});

let generateId = function() {
  return randomStringGenerator.generate({length: 8, charset: 'hex', capitalization: 'uppercase'});
};

sampleSchema.pre('validate', function() {
  if (this.isNew)
    this._id = generateId();
});

let SampleModel = mongoose.model('Sample', sampleSchema);

SampleModel.prototype.save_original = SampleModel.prototype.save;
SampleModel.prototype.save = function(options, callback) {
  let self = this;

  let isDupKeyError = (error, field) => {
    // Determine whether the error is a duplicate key error on the given field
    return error?.code === 11000 && error?.name === 'MongoError' && error?.keyValue[field];
  }

  let saveWithRetries = (options, callback) => {
    // save() returns undefined if used with callback or a Promise otherwise.
    // https://mongoosejs.com/docs/api/document.html#document_Document-save
    let promise = self.save_original(options, callback);
    if (promise) {
      return promise.catch((error) => {
        if (isDupKeyError(error, '_id')) {
          return saveWithRetries(options, callback);
        }
        throw error;
      });
    }
  };

  let retryCallback;
  if (callback) {
    retryCallback = (error, saved, rows) => {
      if (isDupKeyError(error, '_id')) {
        saveWithRetries(options, retryCallback);
      } else {
        callback(error, saved, rows);
      }
    }
  }

  return saveWithRetries(options, retryCallback);
}
This will generate an _id repeatedly until a save succeeds, and it addresses the three problems outlined in the original question:
The minimum number of trips to the database has been reduced from two to one. Of course, if there are collisions, more trips will occur, but that's the exceptional case.
This implementation takes care of generating the id itself with no manual step to take before saving. This reduces complexity and removes the required knowledge of prerequisites for saving that are present in the original method.
The race condition has been addressed. It won't matter if two clients attempt to use the same key. One will succeed and the other will generate a new key and save again.
To improve this:
There ought to be a maximum number of save attempts for a single document, followed by a failure; hitting that limit would suggest you've used up all the available keys in whatever id space you're using (a sketch of this follows after this list).
The unique field may not be named _id or you might have multiple fields that require a unique generated value. The embedded helper function isDupKeyError() could be updated to look for multiple keys. Then on error you could add logic to regenerate just the failed key.
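As a sketch of the first improvement (my own variation, not part of the original solution; the cap of 5 is arbitrary), the saveWithRetries closure inside the override above could carry an attempt counter:

const MAX_SAVE_ATTEMPTS = 5; // arbitrary cap; pick one that fits your id space

let saveWithRetries = (options, callback, attempt = 1) => {
  let promise = self.save_original(options, callback);
  if (promise) {
    return promise.catch((error) => {
      if (isDupKeyError(error, '_id') && attempt < MAX_SAVE_ATTEMPTS) {
        return saveWithRetries(options, callback, attempt + 1);
      }
      // Out of attempts, or a different error: surface it to the caller.
      throw error;
    });
  }
};

The callback branch (retryCallback) would need the same counter if you use the callback form of save().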
This is my function. If I remove the ? placeholders and enter the values manually, it executes, so I would assume this is how you pass in parameters. Is this correct? If I console.log the params they all come through fine, so I'm assuming the problem is in how they're being passed down.
async function getMultiple(page = 1) {
  const offset = helper.getOffset(page, config.listPerPage);
  const rows = await db.query(
    'SELECT id, quote, author FROM quote LIMIT ?,?',
    [offset, config.listPerPage]
  );
  const data = helper.emptyOrRows(rows);
  const meta = {page};

  return {
    data,
    meta
  }
}

module.exports = {
  getMultiple
}
So it turns out that the MySQL version I had installed (8.0.23) has a problem with prepared statements (or handles them in a different way). I had to downgrade below that version and it worked as expected; I downgraded to 5.7.
I'm creating a page object model, and one of the properties is all the users from a table. The table has a few columns, so I'd like to parse that table and create a user object for each row, then return that set to be used in tests. So far, this is what that property of my page object looks like:
users: {
  get: function() {
    let userRecords = [];
    var users = element.all(by.repeater('user in users')).each(function(tr, index) {
      let user = {
        name: tr.element(by.css('td:nth-child(2)')).getText().then(text => {return text;}),
        role: tr.element(by.css('td:nth-child(3)')).getText().then(text => {expect(text).toBeTruthy();}),
        status: tr.element(by.css('td:nth-child(4)')).getText().then(text => {expect(text).toBeTruthy();}),
        //actionsButton: tr.element(by.css('btn btn-default'))
      };
      userRecords += user;
    }).then(userRecords => {return userRecords});
    return userRecords;
  }
},
Through trial and error I encounter one of two outcomes when simply trying to print each element of userRecords to the screen:
each element prints as undefined or
userRecords is not defined.
Just to reiterate, I'm simply trying to build an array that holds each user as an object to then be able to iterate / filter on that set in my tests.
Given the approach I'm taking, what's the ideal way to construct this user array and resolve the promises?
Edit: it's worth noting that if I do a console.log() within each of the getText().then() statements, it does print the correct data from the table. So, I do know that it's reading the table correctly.
I'd go with a method that returns JSON, and I would make it async:
users: async function() {
  let userRecords = [];
  var users = element.all(by.repeater('user in users'));
  for (let i = 0; i < await users.count(); i++) {
    let tr = users.get(i);
    let user = {
      name: await tr.element(by.css('td:nth-child(2)')).getText(),
      role: await tr.element(by.css('td:nth-child(3)')).getText(),
      status: await tr.element(by.css('td:nth-child(4)')).getText()
    };
    userRecords.push(user); // push the parsed row onto the result array
  }
  return userRecords;
},
and then use:
console.log(
  JSON.stringify(
    await constructorName.users()
  )
)
It should be as simple as that. Note, I didn't test the code, but I have used this approach in my own work, so it may require some minor modifications.
In general, try to avoid .then() (async/await is easier to use) and .each() (go with a for loop instead). Also, userRecords += user; doesn't do what you think it does: += coerces the object to a string and concatenates it rather than appending it to the array (though I may be wrong about your intent here).
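Once users() resolves to a plain array, filtering in a test is ordinary array work. A hypothetical spec (the page object name constructorName and the 'Admin' role value are assumptions, not from the original code):

it('lists at least one admin user', async () => {
  const allUsers = await constructorName.users();
  const admins = allUsers.filter(user => user.role === 'Admin');
  expect(admins.length).toBeGreaterThan(0);
});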
I'm trying to add two documents to two different collections, say coll1 & coll2.
I add a document to coll1 and get its document id, which I'd like to use as the id of the coll2 document. I could simply write two add calls, but I'm trying to get these done in a transaction, so that if one fails, both fail.
I could not get that done using this link
Below is the code I have, which needs to be turned into a transaction/batch:
await db.runTransaction(
  async function (transaction) {
    const coll1 = {
      text: 'This is collection 1 text',
    }
    const coll1Doc = await db
      .collection('coll1')
      .add(coll1)
    // I tried transaction.set(db.collection('coll1').doc(), coll1) but this doesn't
    // return the doc or the docId which we need in the next step.
    // Similarly, batch.set also doesn't return the newly added/edited doc or its id.
    if (coll1Doc && coll1Doc.id) {
      const coll1Id = coll1Doc.id
      const coll2 = {
        text: 'This is collection 2 text',
      }
      await db
        .collection('coll2')
        .doc(coll1Id)
        .set(coll2)
    }
  }
)
Firestore document IDs are generated inside your application code, and are statistically guaranteed to be unique. So your add() call essentially takes these three steps:
Generate a new unique ID
Create a DocumentReference for that ID
Set the data in that DocumentReference
With that knowledge, you can build the DocumentReference yourself, which gives you an ID without going through the transaction object:
// coll1 here is the same data object as in the question
const coll1Doc = db
  .collection('coll1')
  .doc();           // no write happens yet; this just creates a reference with a new ID
const id1 = coll1Doc.id;
await coll1Doc.set(coll1);
Now you can use id1 in the second write operation.
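Putting that together, a sketch of the whole transaction (using the data objects from the question): both writes go through the transaction object, so they either both succeed or both fail.

await db.runTransaction(async (transaction) => {
  const coll1 = { text: 'This is collection 1 text' };
  const coll2 = { text: 'This is collection 2 text' };

  // The ID is generated client-side, so no read is needed to obtain it.
  const coll1Doc = db.collection('coll1').doc();
  const coll2Doc = db.collection('coll2').doc(coll1Doc.id); // reuse the same ID

  transaction.set(coll1Doc, coll1);
  transaction.set(coll2Doc, coll2);
});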
I'm struggling to find an example of using a cursor with pg-promise. node-postgres supports its pg-cursor extension. Is there a way to use that extension with pg-promise? I'm attempting to implement an asynchronous generator (to support for-await-of). pg-query-stream doesn't seem to be appropriate for this use case (I need "pull", rather than "push").
As an example, I use SQLite for my unit tests and my (abridged) generator looks something like this...
async function* () {
  const stmt = await db.prepare(...);
  try {
    while (true) {
      const record = await stmt.get();
      if (isUndefined(record)) {
        break;
      }
      yield record;
    }
  }
  finally {
    stmt.finalize();
  }
}
Using pg-cursor, the assignment to stmt would become something like client.query(new Cursor(...)), stmt.get would become stmt.read(1) and stmt.finalize would become stmt.close.
Thanks
Following the original examples, we can modify them for use with pg-promise:
const pgp = require('pg-promise')(/* initialization options */);
const db = pgp(/* connection details */);

const Cursor = require('pg-cursor');

const c = await db.connect(); // manually managed connection

const text = 'SELECT * FROM my_large_table WHERE something > $1';
const values = [10];

const cursor = c.client.query(new Cursor(text, values));

cursor.read(100, (err, rows) => {
  cursor.close(() => {
    c.done(); // releasing connection
  });
  // or you can just do: cursor.close(c.done);
});
Since pg-promise doesn't support pg-cursor explicitly, one has to manually acquire the connection object and use it directly, as shown in the example above.
pg-query-stream doesn't seem to be appropriate for this use case (I need pull, rather than push).
Actually, in the context of these libraries, both streams and cursors only pull data, so it would be fine for you to use streaming as well.
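For the pull-based generator the question asks about, here is a sketch built on the same manually managed connection. It is my own, not from the pg-promise or pg-cursor docs; queryCursor and readRows are names I made up.

const pgp = require('pg-promise')(/* initialization options */);
const db = pgp(/* connection details */);
const Cursor = require('pg-cursor');

// Promise wrapper around the callback form of cursor.read()
const readRows = (cursor, count) =>
  new Promise((resolve, reject) =>
    cursor.read(count, (err, rows) => (err ? reject(err) : resolve(rows)))
  );

// Pull-based async generator: each batch is fetched only when the consumer asks for it
async function* queryCursor(text, values, batchSize = 100) {
  const c = await db.connect(); // manually managed connection, as above
  const cursor = c.client.query(new Cursor(text, values));
  try {
    while (true) {
      const rows = await readRows(cursor, batchSize);
      if (rows.length === 0) {
        break; // cursor exhausted
      }
      for (const row of rows) {
        yield row;
      }
    }
  } finally {
    await new Promise((resolve) => cursor.close(resolve));
    c.done(); // release the connection back to the pool
  }
}

// Usage with for-await-of:
// for await (const row of queryCursor('SELECT * FROM my_large_table WHERE something > $1', [10])) {
//   console.log(row);
// }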
UPDATE
For reading data in a simple and safe way, check out pg-iterator.