MongoDB Aggregation not updating my collection - javascript

I'm trying to run an aggregation query in Mongo using the Node.js driver that takes some of my fields and adds/finds averages, etc. I built the aggregation in MongoDB Cloud and exported it to Node, but when I run the code I get the following responses from Mongo:
this.res could not be serialized.
steps.update_mongo.res:null
Here's the code (history field is an array of objects):
const agg = [
  {
    '$addFields': {
      'avgGrowth': {
        '$ceil': { '$avg': '$history.growth' }
      },
      'avgDecline': {
        '$ceil': { '$avg': '$history.decline' }
      }
    }
  },
  {
    '$merge': {
      'into': { 'db': 'mydb', 'coll': 'test' },
      'on': '$_id'
    }
  }
];
const coll = await db.collection('test');
this.res = await coll.aggregate(agg, async (cmdErr, result) => {
  await assert.equal(null, cmdErr);
});

this.res = await coll.aggregate(agg, async (cmdErr, result) => {
  await assert.equal(null, cmdErr);
});

is incorrect syntax: you either provide a callback or use await, not both.
Try it as follows:
this.res = await coll.aggregate(agg);
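Note also that in the Node.js driver, aggregate() returns a cursor and the pipeline only executes once the cursor is iterated, so with a $merge stage you still need to drain it (toArray() is the simplest way). In addition, $merge's on option expects a plain field name such as '_id', not the path '$_id'. A minimal sketch under those assumptions, reusing the pipeline from the question:

const agg = [
  {
    $addFields: {
      avgGrowth: { $ceil: { $avg: '$history.growth' } },
      avgDecline: { $ceil: { $avg: '$history.decline' } }
    }
  },
  {
    // 'on' takes a field name, not a '$'-prefixed path
    $merge: { into: { db: 'mydb', coll: 'test' }, on: '_id' }
  }
];

const coll = db.collection('test'); // collection() is synchronous, no await needed
// Draining the cursor forces the pipeline (and the $merge) to run
this.res = await coll.aggregate(agg).toArray();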

Related

Rename database field within array in MongoDB

I need to change a few database fields in my backend controller before returning the object to the frontend.
Currently, I am doing it in the front end like this with my returned object:
for (let contribution of contributions) {
  contribution["title"] = "You Added One"
  contribution["launchName"] = contribution.name
  contribution["launchId"] = contribution._id
  contribution["date"] = contribution.addedAt
  contribution["content"] = contribution.description
}
But I am now trying to do this work in the backend using Mongo.
This is my controller:
const Launch = require('../models/launch')
const User = require('../models/user')

async function getRecentActivityByUserId (req, res) {
  const { userId } = req.params
  const user = await User.findOne({ userId }).lean() || []
  const contributions = await Launch.find({ _id: { $in: user.contributions } })
  return res.status(200).send(contributions.reverse())
}
So this correctly returns an object to the frontend but I still need to change the database field names.
So I tried this:
async function getRecentActivityByUserId (req, res) {
  let recents = []
  const { userId } = req.params
  const user = await User.findOne({ userId }).lean() || []
  const contributions = await Launch.find({ _id: { $in: user.contributions } }).aggregate([
    {
      $addFields: {
        plans: {
          $map: {
            input: "$launch",
            as: "l",
            in: {
              title: "You Added One",
              launchName: "$$l.name",
              launchId: "$$l._id",
              date: "$$l.addedAt",
              content: "$$l.description",
            }
          }
        }
      }
    },
    {
      $out: "launch"
    }
  ])
  return res.status(200).send(contributions.reverse())
}
The above throws an error saying that .aggregate is not a function on .find. Even if I remove the .find, the returned object is just an empty array, so I'm obviously not aggregating correctly.
How can I combine .find with .aggregate, and what is wrong with my .aggregate function?
I also tried combining aggregate with find like this and get the error Arguments must be aggregate pipeline operators:
const contributions = await Launch.aggregate([
  {
    $match: {
      _id: { $in: user.contributions }
    },
    $addFields: {
      plans: {
        $map: {
          input: "$launch",
          as: "l",
          in: {
            title: "You Added a Kayak Launch",
            launchName: "$$l.name",
            launchId: "$$l._id",
            date: "$$l.addedAt",
            content: "$$l.description",
          }
        }
      }
    }
  },
  {
    $out: "launch"
  }
])
EDIT: I just realized that I have the word plans in the aggregate function, which is not relevant to my code. I copied this code from elsewhere, so I'm not sure what the value should be.
I figured it out. The Arguments must be aggregate pipeline operators error came from putting $match and $addFields inside the same stage object; each pipeline stage must be its own object. This is the solution:
async function getRecentActivityByUserId (req, res) {
  let recents = []
  const { userId } = req.params
  const user = await User.findOne({ userId }).lean() || []
  const contributions = await Launch.aggregate([
    {
      $match: {
        _id: { $in: user.contributions }
      }
    },
    {
      $addFields: {
        title: "You Added One",
        launchName: "$name",
        launchId: "$_id",
        date: "$addedAt",
        content: "$description"
      }
    }
  ])
  if (contributions) {
    recents = recents.concat(contributions);
  }
  return res.status(200).send(recents.reverse())
}
The actual problem from the question was a small syntax error which has been noted and corrected in the self-reported answer here.
I noted in the comments there that the current approach of issuing two separate operations (a findOne() followed by an aggregate() that uses the results) could be simplified into a single query to the database. The important thing here is that you will $match against the first collection (users or whatever the collection name is in your environment) and then use $lookup to perform the "match" against the subsequent launches collection.
Here is a playground demonstrating the basic approach. Adjust as needed to the specifics of your environment.
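In the spirit of that playground, here is a minimal sketch of the single-query shape. The collection name 'launches' and the layout of the contributions field are assumptions based on the question's models:

const results = await User.aggregate([
  { $match: { userId } },
  {
    // join the user's contributions array against the launches collection
    $lookup: {
      from: 'launches',            // assumed collection name behind the Launch model
      localField: 'contributions',
      foreignField: '_id',
      as: 'contributions'
    }
  },
  { $unwind: '$contributions' },
  {
    // reshape each joined launch into the fields the frontend expects
    $project: {
      title: { $literal: 'You Added One' },
      launchName: '$contributions.name',
      launchId: '$contributions._id',
      date: '$contributions.addedAt',
      content: '$contributions.description'
    }
  }
])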

Getting "MongoExpiredSessionError: Cannot use a session that has ended" when using forEach to update document but why?

I'm just trying to upsert a key-value pair, i.e. { mark: "done" }, to my Mongo database using ids that I have stored in an array.
But for the life of me I can't figure out why it works fine with a for loop, yet with a forEach loop it gives me a "Cannot use a session that has ended" error.
I'm hoping someone on SO can assist in explaining why this happens.
Boilerplate code
import { MongoClient } from "mongodb";
const uri = "mongodb://localhost:27018/";
const client = new MongoClient(uri);
Function with "for loop" implementation
async function updateMongo1() {
  const db = client.db("testDB");
  const coll = db.collection("testCollection");
  await client.connect();
  try {
    let array = [
      "2021-10-18",
      "2021-10-17",
      "2021-10-16",
      "2021-10-15",
      "2021-10-14",
    ];
    for (let i = 0; i < array.length; i++) {
      await coll.updateOne(
        { _id: array[i] },
        { $set: { mark: "done" } },
        { upsert: true }
      );
    }
  } catch (e) {
    console.error(e.message);
  } finally {
    await client.close();
  }
}
Function with "forEach" implementation
async function updateMongo2() {
  const db = client.db("testDB");
  const coll = db.collection("testCollection");
  await client.connect();
  try {
    let array = [
      "2021-10-18",
      "2021-10-17",
      "2021-10-16",
      "2021-10-15",
      "2021-10-14",
    ];
    array.forEach(async (element) => {
      await coll.updateOne(
        { _id: element },
        { $set: { mark: "done" } },
        { upsert: true }
      );
    });
  } catch (e) {
    console.error(e.message);
  } finally {
    await client.close();
  }
}
Running the different implementations yields different results:
(async () => {
  // await updateMongo1(); // -> this works
  await updateMongo2(); // -> this gives "MongoExpiredSessionError: Cannot use a session that has ended", what gives??
})();
Please let me know why this happens.
Array.prototype.forEach does not wait for an async callback: it kicks off all five updateOne calls and returns immediately, so execution falls through to the finally block and client.close() runs while the updates are still in flight. Once the session has ended, the pending operations fail with MongoExpiredSessionError.
The for loop version works because each await inside it suspends the surrounding async function, so every update finishes before client.close() is reached.
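A minimal sketch of two ways to fix the forEach version, assuming the same client, coll, and array as above:

// Option 1: a for...of loop, which respects await and runs the updates sequentially
for (const element of array) {
  await coll.updateOne(
    { _id: element },
    { $set: { mark: "done" } },
    { upsert: true }
  );
}

// Option 2: map the array to promises and wait for all of them
// before control reaches the finally block
await Promise.all(
  array.map((element) =>
    coll.updateOne({ _id: element }, { $set: { mark: "done" } }, { upsert: true })
  )
);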

oracledb for node-js: several queries one after the other, where the next query takes a result from the previous one as a bind parameter

I am stuck on my problem because I am really new to oracle-db for node-js and JavaScript in general.
I wrote API endpoints in JavaScript, and each endpoint runs an oracledb select query. However, my colleagues need a single endpoint that returns the results of all the oracle select queries as one JSON object. The queries all depend on the first one, i.e. the result of the first query has to be used as a bind parameter for the second query, and so forth.
In this js file I tried to execute two oracle SQL queries (one after the other), use one value from the result JSON object of the first query as a parameter for the second query, and return all results as the response to the API client. But that does not work. What am I doing wrong here? With only one SQL query, and therefore only one result, it works. Thanks in advance!
Here is the code I wrote:
const oracledb = require('oracledb');
const config = require('../config/config')

oracledb.autoCommit = true
oracledb.fetchAsString = [ oracledb.DATE, oracledb.NUMBER ]

module.exports = async (req, res) => {
  let connection;
  try {
    var sql, binds, options, result, result2, time
    connection = await oracledb.getConnection(config.oracledbconnection)
    // Query the data
    sql = `SELECT FIRSTNR, SECONDNR, STARTTIME, MACHINE FROM MACHINELOGS WHERE MACHINE = :Machine AND ENDTIME > CURRENT_DATE - 1 ORDER BY RUNNO ASC`;
    binds = { Machine: req.params.machine }
    options = {
      outFormat: oracledb.OUT_FORMAT_OBJECT // query result format
      // outFormat: oracledb.OBJECT // is the same as above
      // extendedMetaData: true, // get extra metadata
      // prefetchRows: 100, // internal buffer allocation size for tuning
      // fetchArraySize: 100 // internal buffer allocation size for tuning
    };
    result = await connection.execute(sql, binds, options);
    // console.log("Metadata: ");
    // console.dir(result.metaData, { depth: null });
    // console.log("Query results: ");
    // console.dir(result.rows, { depth: null });

    // Show the date. The value of ORA_SDTZ affects the output
    sql = `SELECT TO_CHAR(CURRENT_DATE, 'DD-Mon-YYYY HH24:MI') AS CD FROM DUAL`;
    time = await connection.execute(sql, binds, options);
    // console.log("Current date query results: ");
    // console.log(time.rows[0]['CD']);

    sql = `SELECT GRADE FROM NOTES WHERE SECONDNR = :Secondnr`
    binds = { Secondnr: result.rows[0]['SECONDNR'] }
    result2 = await connection.execute(sql, binds, options);
    options = {
      outFormat: oracledb.OUT_FORMAT_OBJECT //
    };
  }
  catch (err) {
    console.error(err);
  }
  finally {
    if (connection) {
      try {
        await connection.close();
      }
      catch (err) {
        console.error(err);
      }
    }
  }
  res.send(result, result2)
}
Take it back to basics. This works:
'use strict';

process.env.ORA_SDTZ = 'UTC';

const oracledb = require('oracledb');
const dbConfig = require('./dbconfig.js');

if (process.platform === 'darwin') {
  oracledb.initOracleClient({ libDir: process.env.HOME + '/Downloads/instantclient_19_8' });
}

let sql, options, result1, result2;

options = { outFormat: oracledb.OUT_FORMAT_OBJECT };

async function run() {
  let connection;
  try {
    connection = await oracledb.getConnection(dbConfig);

    result1 = await connection.execute(
      `select 1 as myval from dual`,
      [], options);
    console.dir(result1.rows, { depth: null });

    result2 = await connection.execute(
      `select sysdate from dual where 1 = :bv`,
      { bv: result1.rows[0].MYVAL },
      options);
    console.dir(result2, { depth: null });
  } catch (err) {
    console.error(err);
  } finally {
    if (connection) {
      try {
        await connection.close();
      } catch (err) {
        console.error(err);
      }
    }
  }
}

run();
Then try with your datatypes.
I'm not sure what you want to do with returning 'all results'. You can build up any JS object and convert to and from JSON easily with JSON.stringify and JSON.parse.
For efficiency, you could review whether you can do all your 'queries' in a single SQL statement.
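On the original code specifically: assuming this is an Express handler, res.send() takes a single body argument, so res.send(result, result2) will not return both results. A minimal sketch of building the one JSON object your colleagues asked for, using the variable names from the question:

// Collect all three result sets into one object and send it as JSON
res.send({
  machineLogs: result.rows,
  currentDate: time.rows[0].CD,
  grades: result2.rows,
});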

Asynchronous verification within the .map function

I am developing the backend of an application using Node.js, Sequelize and a Postgres database.
When a course is registered, the user must specify which organizations, companies and teachers will be linked to it.
The organization IDs are passed to the backend in an array, and I am trying to check that the passed IDs actually exist.
What I've done so far is this:
const { organizations } = req.body;

const organizationsArray = organizations.map(async (organization) => {
  const organizationExists = await Organization.findByPk(organization);
  if (!organizationExists) {
    return res
      .status(400)
      .json({ error: `Organization ${organization} does not exists!` });
  }
  return {
    course_id: id,
    organization_id: organization,
  };
});

await CoursesOrganizations.bulkCreate(organizationsArray);
This link has the complete controller code; I believe it will facilitate understanding.
When !organizationExists is true, I correctly get the response that the organization does not exist. The problem is when the organization does exist: then I get the following error message.
The Array.map() is returning an array of promises that you can resolve to an array using Promise.all(). Inside the map you should use throw new Error() to break out of the map - this error will be raised by Promise.all() and you can then catch it and return an error to the client (or swallow it, etc).
This is a corrected version of your pattern, resolving the Promise results.
const { organizations } = req.body;

try {
  // use Promise.all to resolve the promises returned by the async callback function
  const organizationsArray = await Promise.all(
    // this will return an array of promises
    organizations.map(async (organization) => {
      const organizationExists = await Organization.findByPk(organization, {
        attributes: ['id'], // we only need the ID
        raw: true, // don't need Instances
      });
      if (!organizationExists) {
        // don't send response inside the map, throw an Error to break out
        throw new Error(`Organization ${organization} does not exists!`);
      }
      // it does exist so return/resolve the value for the promise
      return {
        course_id: id,
        organization_id: organization,
      };
    })
  );
  // if we get here there were no errors, create the records
  await CoursesOrganizations.bulkCreate(organizationsArray);
  // return a success to the client
  return res.json({ success: true });
} catch (err) {
  // there was an error, return it to the client
  return res.status(400).json({ error: err.message });
}
This is a refactored version that will be a bit faster by fetching all the Organizations in one query and then doing the checks/creating the Course inserts.
const { Op } = Sequelize;
const { organizations } = req.body;

try {
  // get all Organization matches for the IDs
  const organizationsArray = await Organization.findAll({
    attributes: ['id'], // we only need the ID
    where: {
      id: {
        [Op.in]: organizations, // WHERE id IN (organizations)
      },
    },
    raw: true, // no need to create Instances
  });
  // create an array of the IDs we found
  const foundIds = organizationsArray.map((org) => org.id);
  // check to see if any of the IDs are missing from the results
  if (foundIds.length !== organizations.length) {
    // Use Array.reduce() to figure out which IDs are missing from the results
    const missingIds = organizations.reduce((missingIds, orgId) => {
      if (!foundIds.includes(orgId)) {
        missingIds.push(orgId);
      }
      return missingIds;
    }, []); // initialized to empty array
    throw new Error(`Unable to find Organization for: ${missingIds.join(', ')}`);
  }
  // now create an array of courses to create using the foundIds
  const courses = foundIds.map((orgId) => {
    return {
      course_id: id,
      organization_id: orgId,
    };
  });
  // if we get here there were no errors, create the records
  await CoursesOrganizations.bulkCreate(courses);
  // return a success to the client
  return res.json({ success: true });
} catch (err) {
  // there was an error, return it to the client
  return res.status(400).json({ error: err.message });
}
If you have an array of IDs and you want to check whether they all exist, you should use the in operator (Op.in). This way you hit the DB only once and get all the records in one query (instead of fetching them one by one in a loop); after you get these records, you can compare lengths to determine whether they all exist.
const { Op } = require("sequelize");

let foundOrgs = await Organization.findAll({
  where: {
    id: {
      [Op.in]: organizationsArray,
    },
  },
});
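A short sketch of the length comparison described above, using the names from this snippet:

// If fewer rows came back than IDs were supplied, at least one ID does not exist
if (foundOrgs.length !== organizationsArray.length) {
  throw new Error('One or more of the given organizations do not exist!');
}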

MongoDB & JavaScript heap out of memory

The data size in the telemetry table is HUGE, so I get a "JavaScript heap out of memory" error. How do I overcome that error?
const aloUrl = `mongodb://${userName}:${pwd}@${host}:${port}/${dbName}`;

MongoClient.connect(aloUrl, function (err, client) {
  if (err) {
    return console.log('ERROR:: ', err);
  }
  console.log("INFO:: OK");
  const db = client.db(dbName);
  var arr = db.collection('endpoint').find({ provider: "KMR" }).map(e => e._id).toArray((err, result) => {
    if (err) {
      console.log("ERROR", err)
    }
    var son = db.collection('telemetry').find({ endpoint: { $in: result } }).toArray().then(arr => {
      console.log("Let's start to party")
      for (let i = 0; i < 10; i++) {
        console.log("\t" + arr[i]._id)
      }
    }).catch(e => {
      console.log(`ERROR::${e}`)
    })
  })
});
From the mongodb docs:

"The toArray() method returns an array that contains all the documents from a cursor. The method iterates completely the cursor, loading all the documents into RAM and exhausting the cursor."
Thus instead of calling toArray, you should use the next or forEach (or some other method which doesn't load everything at once to RAM), to iterate through the elements one by one.
For example, to print all the documents in your telemetry collection ONE BY ONE, you can do this,
db.collection('telemetry')
  .find({
    endpoint: {
      $in: result
    }
  })
  .forEach((document) => {
    console.log(document)
  });
I would suggest you use forEach instead of toArray in order to fetch and process documents without exhausting memory.
For huge data sets it is always advisable to stream (in Mongo this is achieved with a cursor).
Also, from the docs:

"$lookup is new in MongoDB 3.2. It performs a left outer join to an unsharded collection in the same database to filter in documents from the 'joined' collection for processing."

You can have a look at the aggregation pipeline for Mongo.
Updating your code to use aggregate:
var MongoClient = require('mongodb').MongoClient;

// Connection URL
const aloUrl = `mongodb://${userName}:${pwd}@${host}:${port}/${dbName}`;

MongoClient.connect(aloUrl, function (err, client) {
  console.log("INFO:: OK");
  const db = client.db(dbName);
  const col = db.collection('endpoint');
  var cursor = col.aggregate([
    {
      $match: { provider: "KMR" }
    },
    {
      $lookup: {
        from: "telemetry",
        localField: "_id",
        foreignField: "endpoint",
        as: "telemetry"
      }
    }
  ]);
  console.log("Let's start to party")
  cursor.on('data', function (data) {
    console.log("\t" + data._id)
  });
  cursor.on('end', function () {
    console.log("Done ");
  });
});
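As a side note, newer versions of the Node.js driver expose cursors as async iterators, so the same streaming loop can be written without event handlers. A minimal sketch, assuming the same cursor as above inside an async function:

// for await pulls documents in batches via the cursor, keeping memory usage flat
for await (const doc of cursor) {
  console.log("\t" + doc._id);
}
console.log("Done");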
