Dynamically building MongoDB queries in NodeJS - javascript

I receive a POST argument that looks like this:
{
  sort: [
    { field: 'name', dir: 'asc', compare: '' },
    { field: 'org', dir: 'asc', compare: '' }
  ]
}
and I need to create a MongoDB query based on that, so it should look like:
db.collection("my_collection").find( ... ).sort({'name': 'asc', 'org': 'asc'}).toArray(...);
Anyway, keep in mind that more fields could be passed. It could also happen that none of those fields is passed, in which case the query won't have .sort() at all.
My question: how can I dynamically build a query like this with Node's MongoDB driver? Is there a query builder or something similar?

I've found that most cases are unique in terms of the data being passed, so building query objects varies from project to project.
So my first idea was to create middleware for Express (in my case) that parses the query arguments into objects that are valid for the query.
The native MongoDB driver accepts options either chained onto the cursor or passed as an options object:
Chained:
items.find({ type: 'location' }).sort({ title: 1 }).limit(42).toArray(function(err, data) {
// ...
});
Non-chained:
items.find({ type: 'location' }, { sort: { title: 1 }, limit: 42 }).toArray(function(err, data) {
// ...
});
As you can see, the non-chained form accepts everything as a single options object, while the chained form returns the cursor after every method call, so the cursor can be reused. So generally you have two options:
For Chained:
var cursor = items.find({ type: 'location' });
if (sort) {
cursor.sort(sort);
}
cursor.toArray(function(err, data) {
// ...
});
For Non-Chained:
var options = { };
if (sort) {
options.sort = sort;
}
items.find({ type: 'location' }, options).toArray(function(err, data) {
// ...
});
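Applied to the sort array from the question, a minimal sketch of building the sort object could look like this (assuming the parsed array arrives as req.body.sort; the helper name is made up for illustration):
// hypothetical helper: turn [{ field: 'name', dir: 'asc' }, ...] into { name: 1, ... }
function buildSort(sortParams) {
  var sort = {};
  (sortParams || []).forEach(function(item) {
    // only accept known directions; anything else is ignored
    if (item && item.field && (item.dir === 'asc' || item.dir === 'desc')) {
      sort[item.field] = item.dir === 'asc' ? 1 : -1; // 1 / -1 works across driver versions
    }
  });
  return sort;
}

var sort = buildSort(req.body.sort);
var cursor = items.find({ type: 'location' });
if (Object.keys(sort).length > 0) {
  cursor = cursor.sort(sort);
}
cursor.toArray(function(err, data) {
  // ...
});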
It is important to remember that any data coming from the query has to be validated and parsed properly. Also, if you are developing an API (for example) and later decide to change the way sorting arguments are passed, or want to add a new way, then writing middleware (in Express) that parses this data is the way to go.
Example for pagination:
function pagination(options) {
  return function(req, res, next) {
    var limit = options.limit ? options.limit : 0;
    var skip = 0;

    if (req.query.limit) {
      var tmp = parseInt(req.query.limit, 10);
      if (!isNaN(tmp)) {
        limit = tmp;
      }
    }

    if (req.query.skip) {
      var tmp = parseInt(req.query.skip, 10);
      if (!isNaN(tmp) && tmp > 0) {
        skip = tmp;
      }
    }

    if (options.max) {
      limit = Math.min(limit, options.max);
    }
    if (options.min) {
      limit = Math.max(limit, options.min);
    }

    req.pagination = {
      limit: limit,
      skip: skip
    };

    next();
  };
}
Usage:
app.get('/items', pagination({
  limit: 8, // by default will return up to 8 items
  min: 1,   // minimum 1
  max: 64   // maximum 64
}), function(req, res, next) {
  var options = {
    limit: req.pagination.limit,
    skip: req.pagination.skip
  };

  items.find({ }, options).toArray(function(err, data) {
    if (!err) {
      res.json(data);
    } else {
      next(err);
    }
  });
});
And URL examples:
http://example.com/items
http://example.com/items?skip=64
http://example.com/items?skip=256&limit=32
So this is a way to build a flexible setup: it does not impose any rules on how things have to be coded, and it solves your challenge as well.

Related

Construct MongoDB query from GraphQL request

Let's say we query the server with this request, and we only want to get the specified user's email. My current implementation requests the whole User object from MongoDB, which I can imagine is extremely inefficient.
GQL
{
user(id:"34567345637456") {
email
}
}
How would you go about creating a MongoDB filter that would only return those specified fields? E.g.,
JS object
{
"email": 1
}
My current server is running Node.js, Fastify and Mercurius
Doing this is an advanced feature with many pitfalls. I would suggest starting by building a simple extraction that reads all the fields. That solution works and does not return any additional fields to the client.
The pitfalls are:
nested queries
complex object composition
aliasing
multiple queries into one request
Here is an example that does what you are looking for.
It manages aliasing and multiple queries.
const Fastify = require('fastify')
const mercurius = require('mercurius')

const app = Fastify({ logger: true })

const schema = `
  type Query {
    select: Foo
  }
  type Foo {
    a: String
    b: String
  }
`

const resolvers = {
  Query: {
    select: async (parent, args, context, info) => {
      const currentQueryName = info.path.key
      // search the input query AST node
      const selection = info.operation.selectionSet.selections.find(
        (selection) => {
          return (
            selection.name.value === currentQueryName ||
            // alias may be undefined when the selection is not aliased
            selection.alias?.value === currentQueryName
          )
        }
      )
      // grab the fields requested by the user
      const project = selection.selectionSet.selections.map((selection) => {
        return selection.name.value
      })
      // do the query using the projection
      const result = {}
      project.forEach((fieldName) => {
        result[fieldName] = fieldName
      })
      return result
    },
  },
}

app.register(mercurius, {
  schema,
  resolvers,
  graphiql: true,
})

app.listen(3000)
Call it using:
query {
  one: select {
    a
  }
  two: select {
    a
    aliasMe: b
  }
}
Returns
{
  "data": {
    "one": {
      "a": "a"
    },
    "two": {
      "a": "a",
      "aliasMe": "b"
    }
  }
}
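In a real resolver, the project array would usually be turned into a MongoDB projection instead of the mocked result above. A minimal sketch (the collection handle and the args.id filter are assumptions, not part of the original answer):
// turn ['a', 'b'] into { a: 1, b: 1 }
const projection = {}
project.forEach((fieldName) => {
  projection[fieldName] = 1
})

// fetch only the requested fields (plus _id, unless it is explicitly excluded)
const doc = await collection.findOne({ _id: args.id }, { projection })
return doc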
Expanding on @Manuel Spigolon's original answer, where he stated that one of the pitfalls of his implementation is that it doesn't work on nested queries or on multiple queries in one request, here is an implementation that seeks to fix both.
import _ from 'lodash'; // _.merge is used below to deep-merge the nested projection objects

function formFilter(context: any) {
  let filter: any = {};

  let getValues = (selection: any, parentObj?: string[]) => {
    //selection = labelSelection(selection);
    selection.map((selection: any) => {
      // Check if the parentObj is defined
      if (parentObj)
        // Merge the two objects
        _.merge(filter, [...parentObj, null].reduceRight((obj, next) => {
          if (next === null) return ({ [selection.name?.value]: 1 });
          return ({ [next]: obj });
        }, {}));
      // Check for a nested selection set
      if (selection.selectionSet?.selections !== undefined) {
        // If the selection has a selection set, then we need to recurse
        if (!parentObj) getValues(selection.selectionSet?.selections, [selection.name.value]);
        // If the selection is nested
        else getValues(selection.selectionSet?.selections, [...parentObj, selection.name.value]);
      }
    });
  };

  // Start the recursive function
  getValues(context.operation.selectionSet.selections);
  return filter;
}
Input
{
  role(id: "61f1ccc79623d445bd2f677f") {
    name
    users {
      user_name
      _id
      permissions {
        roles
      }
    }
    permissions
  }
}
Output (JSON.stringify)
{
  "role": {
    "name": 1,
    "users": {
      "user_name": 1,
      "_id": 1,
      "permissions": {
        "roles": 1
      }
    },
    "permissions": 1
  }
}
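A hedged usage sketch of how the resulting object could be passed to MongoDB as a projection (rolesCollection, ObjectId and the surrounding resolver are assumptions, not part of the original answer):
// inside the role resolver, using the GraphQL resolve info as the context argument
const filter = formFilter(info);
// MongoDB 4.4+ accepts this nested form directly as a projection;
// older servers would need it flattened to dot notation, e.g. { 'users.user_name': 1 }
// ObjectId comes from the mongodb package
const role = await rolesCollection.findOne(
  { _id: new ObjectId(args.id) },
  { projection: filter.role }
);
return role;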

Mongoose - CastError Cast to string failed for value "Object"

I have a Mongoose CastError issue. I made a Node.js API. At a specific route, it returns data appended with some other data. I saw many fixes available here, but my scenario is different.
Here is my model and the problem occurs at fields property.
const deviceSchema = new Schema({
device_id: { type: String, required: true },
user_id: { type: Schema.Types.ObjectId, ref: 'User', require: true },
location_latitude: { type: String, default: '0' },
location_longitude: { type: String, default: '0' },
fields: [{ type: String }],
field_id: { type: Schema.Types.ObjectId, ref: 'Field', required: true },
timestamp: {
type: Date,
default: Date.now,
},
});
and my controller is
exports.getAllDevices = async (req, res) => {
try {
let devices = await Device.find({})
.sort({
timestamp: 'desc',
})
.populate('user_id', ['name']);
// Let us get the last value of each field
for (let i = 0; i < devices.length; i++) {
for (let j = 0; j < devices[i].fields.length; j++) {
if (devices[i].fields[j] !== null && devices[i].fields[j] !== '') {
await influx
.query(
`select last(${devices[i].fields[j]}), ${devices[i].fields[j]} from mqtt_consumer where topic = '${devices[i].device_id}'`
)
.then((results) => {
************** Problem occurs here **************
if (results.length > 0) {
devices[i].fields[j] = {
name: devices[i].fields[j],
last: results[0].last,
};
} else {
devices[i].fields[j] = {
name: devices[i].fields[j],
last: 0,
};
}
************** Problem occurs here **************
});
}
}
}
// Return the results
res.status(200).json({
status: 'Success',
length: devices.length,
data: devices,
});
} catch (err) {
console.log(err);
res.status(500).json({
error: err,
});
}
};
It gets data from InfluxDB and appends it to the fields property that was fetched from MongoDB, as described in my model. But it refuses to append, and a CastError occurs.
After the addition, it should look like this:
I can't resolve this error after trying so many fixes. I don't know where I'm wrong. Please suggest a solution.
I can see you are not using the devices variable as a single Mongoose Document; devices is an array of Documents.
I would suggest using the lean() function to convert each Document into a plain JavaScript object, like this:
let devices = await Device.find({})
.sort({
timestamp: 'desc',
})
.populate('user_id', ['name'])
.lean();
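For context, a hedged sketch of why this helps: with lean(), each device is a plain JavaScript object rather than a Mongoose Document, so an assignment like the one below is no longer cast against the fields: [{ type: String }] schema path and the CastError disappears.
// plain object after .lean(): overwriting a string entry with an object is allowed
devices[i].fields[j] = {
  name: devices[i].fields[j],
  last: results[0].last,
};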

Express JS unlimited query parameter function?

I'm trying to accept an unlimited number of query parameters in Express.js, but I couldn't figure out how to implement that in my code. I'm using MongoDB aggregation.
I want to build an unlimited facet search with multiple $match stages.
Which works like this:
'http://localhost:4000/search?text=mango'
'http://localhost:4000/search?text=mango&key=brand&value=rasna' //unlimited facets.
'http://localhost:4000/search?text=mango&key=brand&value=rasna&key=color&value=yellow' //unlimited facet parameters
Here's my code to do this:
app.get("/search", async(request, response) => {
try {
const textsearch = request.query.text;
var keystore = request.query.key; //storing `key` in 'keystore'
var valuestore = request.query.value; //storing `value` in `valuestore`
if (keystore, valuestore) {
facetjson = [
{
'$match': {
[keystore]: `${valuestore}` //Storing key and value in $match
}
}
]
const Pipeline = [{
'$search': {
'text': {
'query': `${textsearch}`,
'path': 'title',
}
}
},
{
'$limit': 5
}
]
//Pushing 'facetjson' array into Pipeline array to make a filtered search possible.
const newitem = insert(Pipeline, Pipeline.length - 1, facetjson)
let result = collection.aggregate(newitem).toArray();
response.send(result);
} else {
const Pipeline = [{
'$search': {
'text': {
'query': `${textsearch}`,
'path': 'title',
}
}
},
{
'$limit': 5
}
]
let result = collection.aggregate(Pipeline).toArray();
response.send(result);
};
} catch (error) {
response.status(500).send({ message: error.message });
}
})
JSFiddle code example: https://jsfiddle.net/divyanshuking/z0vo589e/
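The insert helper used above is defined in the linked JSFiddle; a minimal version consistent with how it is called, insert(array, index, itemsToInsert), might look like this:
// returns a new array with `items` spliced in before `index`
function insert(arr, index, items) {
  return [...arr.slice(0, index), ...items, ...arr.slice(index)];
}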
I know that I have to push a $match stage into the Pipeline array for each single key/value pair. After a lot of googling I figured out that I may need the rest parameter syntax (...keystore, ...valuestore), but I didn't know how to implement it. Do you have any better idea of how to solve this problem?
Why don't you use forEach or something similar?
function endPoint(req, res) {
  const queriesFound = {};
  // req.query is a plain object, not an array, so iterate over its keys
  Object.keys(req.query).forEach((key) => {
    queriesFound[key] = key;
  });
  // queriesFound will be an object like:
  // {
  //   "Name": "Name",
  //   "AnotherParam": "AnotherParam"
  // }
}
Your request URL has the wrong structure for query parameters. If you want to pass multiple key/value pairs in a URL, the correct structure is like this:
'http://localhost:4000/search?text=mango&brand=rasana&color=yellow'
This code should work with this URL structure:
app.get("/search", async(request, response) => {
try {
//We need "search pipeline stage" in all conditions. whether we got a key/value pair in query or not.
//so we use "search stage" when declare pipeline array;
let pipeline = [{
'$search': {
'text': {
'query': `${request.query.text}`,
'path': 'title',
}
}
}];
//If there are keys/values pairs in the query parameters, we add match stage to our pipeline array;
if(request.query) {
let match = {}, hasMatchSatge = false;
for(let item in request.query){
if(item !=== 'text'){
match[item] = request.query[item];
hasMatchStage = true;
}
}
if(hasMatchStage) pipeline.push({'$match': match});
}
//Finally, we add our "limit stage" to the pipeline array;
pipeline.push({'$limit' : 5});
let result = collection.aggregate(pipeline).toArray();
response.status(200).send(result);
} catch (error) {
response.status(500).send({ message: error.message });
}
})
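For example, a request like http://localhost:4000/search?text=mango&brand=rasna&color=yellow would, under this sketch, produce a pipeline roughly like:
[
  { '$search': { 'text': { 'query': 'mango', 'path': 'title' } } },
  { '$match': { 'brand': 'rasna', 'color': 'yellow' } },
  { '$limit': 5 }
]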

SQL Tedious Loop through object of array and execute insert statement function error Request is not a constructor

I'm new to JS, and would appreciate some help.
Currently, via tedious.js, I've written a SQL function that loops through an array of objects and executes an insert statement for each object in the array.
To do this, I've written a loop that iterates through the array and inserts the relevant properties as a new entry into SQL.
However, when I try to execute the function after the connect, it fails with the error "Request is not a constructor".
Here is the full code below. Is there some scope issue here, am I not correctly handling the tedious.js events, or is there some issue with my for loop?
var jsonArray = [];
let jsonobj = {
test: "1",
test2: "2"
}
let jsonobj1 = {
test: "23",
test2: "54"
}
jsonArray.push(jsonobj)
jsonArray.push(jsonobj1)
jsonArray.push(jsonobj)
var config = {
server: '123', //update me
authentication: {
type: 'default',
options: {
userName: '123', //update me
password: '1234' //update me
}
},
options: {
// If you are on Microsoft Azure, you need encryption:
//encrypt: true,
requestTimeout: 30 * 1000,
trustServerCertificate: true,
database: 'db', //update me
rowCollectionOnRequestCompletion: true,
rowCollectionOnDone: true
}
};
var connection = new Connection(config);
connection.on('debug', function(err) { console.log('debug:', err);})
connection.on('connect', function(err) {
});
for (var i = 0; i < jsonArray.length; i++){
var sql = `insert into store (storekey,ip,port) values ( \'${jsonArray[i].test2}\' , '2' , '6');`
executeStatement1(sql)
}
var Request = require('tedious').Request;
var TYPES = require('tedious').TYPES;
function executeStatement1(sql) {
request = new Request(sql, function(err) {
if (err) {
console.log(err);}
});
request.on('row', function(columns) {
columns.forEach(function(column) {
if (column.value === null) {
console.log('NULL');
} else {
console.log(" success " + column.value);
}
});
});
connection.execSql(request);
}
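A likely cause (an observation, not part of the original post): the top-level for loop runs before the line var Request = require('tedious').Request has executed and before the connection has emitted 'connect', so Request is still undefined when new Request(sql, ...) is called. A hedged sketch of a reordering that avoids both problems and runs the inserts one at a time, since tedious handles a single request per connection:
var Connection = require('tedious').Connection;
var Request = require('tedious').Request; // require before any use
var TYPES = require('tedious').TYPES;

var connection = new Connection(config);

connection.on('connect', function(err) {
  if (err) { console.log(err); return; }
  runInsert(0); // only start inserting once the connection is ready
});
connection.connect(); // required on recent tedious versions

function runInsert(i) {
  if (i >= jsonArray.length) { connection.close(); return; }
  // note: for real input, prefer request.addParameter with TYPES over string interpolation
  var sql = `insert into store (storekey, ip, port) values ('${jsonArray[i].test2}', '2', '6');`;
  var request = new Request(sql, function(err) {
    if (err) { console.log(err); }
    runInsert(i + 1); // move on to the next row when this one completes
  });
  connection.execSql(request);
}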

Mongodb asynchronous call?

How do you deal with the asynchronous call in this case? I have tried everything; each time, res.render fires too early and is missing objects from state. playingCollection is a MongoDB collection.
var state = [];
playingCollection.find({},function(err, companies) {
companies.each(function(err,company){
if (company !== null) {
var obj = company.playername;
state.push(obj);
}
res.render('index', { title: 'Demo', error: req.query.error, players: state, head: 'Currently playing:'});
state = [];
return;
});
});
Here is one approach to handle this using toArray after the call to find():
playingCollection.find({}).toArray(function(err, companies) {
if(err) {
res.render('error', {whatever});
return;
}
var state = [];
var i;
for(i=0; i<companies.length; i++) {
state.push(companies[i].playername);
}
res.render('index', { title: 'Demo', error: req.query.error, players: state, head: 'Currently playing:'});
});
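With a recent driver version, toArray() also returns a promise, so the same approach can be written with async/await; a hedged sketch, assuming it runs inside an async route handler:
try {
  const companies = await playingCollection.find({}).toArray();
  const state = companies.map((company) => company.playername);
  res.render('index', { title: 'Demo', error: req.query.error, players: state, head: 'Currently playing:' });
} catch (err) {
  res.render('error', { /* whatever */ });
}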
Below is a simple way using the Cursor object returned from find. It relies only on the fact that the each call will pass a null when the list is exhausted.
var state = [];
playingCollection.find({}).each(function(err, company) {
if (company !== null) {
state.push(company.playername);
} else {
res.render('index', { title: 'Demo', error: req.query.error, players: state,
head: 'Currently playing:'});
return;
}
});
If you know you are only interested in one field from the collection, you should also limit the results using the optional projection parameter:
playingCollection.find({}, { playername: 1 }).each(....);
In the above, it would return only the playername and _id fields for each document.
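Note that in newer versions of the Node.js driver (3.x and later), the second argument to find() is a general options object, so the same field selection would go under the projection key; a small sketch:
// driver 3.x+ syntax: field selection goes under the `projection` option
playingCollection.find({}, { projection: { playername: 1 } }).toArray(function(err, companies) {
  // each company contains only playername and _id
});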
