I have a difficult final-price calculation issue with Mongoose queries. I have three functions. This one gets an ID and an external quantity, finds the material price, and finally multiplies them:
async function getPrivatePrice(id, quantity) {
try {
let piece = await Piece.findOne({'_id': id}).select({"meterage": 1, "material": 1, "_id" : 0}).then((piece) => {
return piece;
}).catch((err) => {
console.log(err);
});
let price = await Material.findOne({'_id': piece.material}).select({"_id" : 0, "price" : 1}).then((price) => {
return price;
}).catch((err) => {
console.log(err);
});
let final_raw = piece.meterage * price.price;
let final_price = final_raw * quantity;
return { final_price : final_price }
} catch(err) {
console.log(err);
}
}
I'm really having difficulties with the second part of the code, because I need to pass in many IDs from another function, so I decided to use a forEach to call this function and collect all the prices in an array, but I don't know how to return it.
async function getFinalCost(pieces) {
let total_cost = [];
try {
pieces.forEach(async (response) => {
let query = await PieceController.getPrivatePrice(response["_id"], response["quantity"]).then((piecePrice) => {
return piecePrice["final_price"];
}).catch((err) => {
console.log(err);
});
total_cost.push(query);
});
return total_cost;
} catch(err) {
console.log(err);
}
}
I'd like to know how to get that array here:
function createProduct(req, res) {
let params = req.body;
let product = new Product();
product.name = params.name;
product.reference = params.reference;
product.pieces = params.pieces;
product.color = params.color;
let piece_price = getFinalCost(product.pieces);
console.log(piece_price); //Outputs Promise { { total_array: [] } }
}
You clearly need to refactor your code. What I would do is use something like bluebird to run a map over the pieces list, like so:
const Promise = require('bluebird');
async function getMoreProductInfo(id) {
try{
const productInfo = await Piece.findOne({'_id': id}).select({"meterage": 1, "material": 1, "_id" : 0});
const materialInfo = await Material.findOne({'_id': productInfo.material}).select({"_id" : 0, "price" : 1});
return { productInfo, materialInfo };
} catch(e) {
throw e;
}
}
async function calculatePrices(pieces) {
try {
const priceList = await Promise.map(pieces, async (piece) => {
const { productInfo, materialInfo } = await getMoreProductInfo(piece._id);
return materialInfo.price * piece.quantity * productInfo.meterage;
});
return priceList;
} catch (e) {
throw e;
}
}
I clearly cannot test this code as-is, but I think it should be pretty straightforward to tell what's going on.
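For completeness, here is a minimal sketch of how the route handler could consume calculatePrices (assuming the Product model from the question; the handler itself must be async so the promise can be awaited):
async function createProduct(req, res) {
    const params = req.body;
    const product = new Product();
    product.name = params.name;
    product.reference = params.reference;
    product.pieces = params.pieces;
    product.color = params.color;

    // Awaiting yields the resolved array instead of a pending Promise.
    const piecePrices = await calculatePrices(product.pieces);
    console.log(piecePrices); // e.g. [12.5, 30, ...]
}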
While trying to get rows from a result set using toQueryStream(), I can only see the rows in the console; I'm not able to return the values using promises, because toQueryStream() uses event listeners to deliver the rows. My code is given below; please suggest how to get the row values.
function getPosCounter() {
return oracledb.getConnection(kcdConnStr)
.then(function (conn) {
return conn.execute(`BEGIN GETPOSCOUNTER(:CV_1); END;`, { // EXECUTE ORACLE PROCEDURE
CV_1: { dir: oracledb.BIND_OUT, type: oracledb.CURSOR } //CURSOR DEFINED FOR OUT PARAM
})
.then((result) => {
var resRows = new Array();
var resultSet = result.outBinds.CV_1; //RESULT SET FOR OUTPUT
var queryStream = resultSet.toQueryStream(); //QUERYSTREAM INITIALIZED FOR CURSOR VALUES
var consumeStream = new Promise((resolve, reject) => {
queryStream.on('data', function (row) {
console.log(row);
});
queryStream.on('error', reject);
queryStream.on('close', resolve);
})
.then(rows => {
console.dir(rows); //RETURN ROW VALUES
});
})
.catch((err) => {
conn.close();
console.error(err);
return 'failure';
})
});
}
Before I faced this issue, I was not aware of the Node.js stream feature, which is quite difficult for me to understand. Then I found documentation on the Oracle Community about getting rows from a result set, and made it work the following way.
function getPosCounter() {
return oracledb.getConnection(kcdConnStr)
.then(function (conn) {
return conn.execute(`BEGIN USP_POSCOUNTER(:CV_1); END;`, { // EXECUTE ORACLE PROCEDURE
CV_1: { dir: oracledb.BIND_OUT, type: oracledb.CURSOR } //CURSOR DEFINED FOR OUT PARAM
})
.then((result) => {
var resRows = [];
var resultSet = result.outBinds.CV_1; //RESULT SET FOR OUTPUT
var queryStream = resultSet.toQueryStream(); //QUERYSTREAM INITIALIZED FOR CURSOR VALUES
return new Promise((resolve, reject) => {
queryStream.on('data', (row) => {
resRows.push(row); //STORE ROWS IN TO BLANK ARRAY
});
queryStream.on('error', reject);
queryStream.on('close', () => {
resolve(resRows); //RETURN ON RESOLVING ALL THE ROWS
conn.close();
});
});
})
.catch((err) => {
conn.close();
//console.error(err);
return 'failure';
})
});
}
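As a side note, on Node.js 10+ readable streams are async-iterable, so the manual 'data'/'close' listeners can be replaced with a for await...of loop. A minimal sketch of that variant (untested with oracledb, so treat the stream compatibility as an assumption):
async function consumeCursor(resultSet) {
    const queryStream = resultSet.toQueryStream();
    const resRows = [];
    // Each iteration awaits the next 'data' event; the loop ends
    // (or throws) when the stream closes or errors.
    for await (const row of queryStream) {
        resRows.push(row);
    }
    return resRows;
}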
Instead of fiddling with promises and streams, you could code like this:
const oracledb = require('oracledb');
const dbConfig = require('./dbconfig.js');
if (process.platform === 'darwin') {
oracledb.initOracleClient({libDir: process.env.HOME + '/Downloads/instantclient_19_8'});
}
async function run() {
let connection;
try {
connection = await oracledb.getConnection(dbConfig);
await connection.execute(
`create or replace procedure usp_poscounter(p out sys_refcursor) as
begin
open p for select postal_code from locations order by location_id;
end;`);
const r = await getPosCounter();
console.dir(r, { depth: null });
} catch (err) {
console.error(err);
} finally {
if (connection) {
try {
await connection.close();
} catch (err) {
console.error(err);
}
}
}
}
async function getPosCounter() {
let conn;
try {
conn = await oracledb.getConnection(dbConfig); // should really use a pool instead
const result = await conn.execute(
`BEGIN USP_POSCOUNTER(:CV_1); END;`,
{ CV_1: { dir: oracledb.BIND_OUT, type: oracledb.CURSOR } }
);
let row, outRows = [];
while ((row = await result.outBinds.CV_1.getRow())) {
outRows.push(row);
}
// With node-oracledb 5.2 you will be able to simplify this loop to be the
// following one line. Note with any node-oracledb version, if the number
// of rows is large, then you will want your architecture to deal with
// batches of rows instead of returning one big array.
//
// const outRows = await result.outBinds.CV_1.getRows(); // empty arg gets all rows
return (outRows);
} catch (err) {
console.error(err);
} finally {
if (conn) {
try {
await conn.close();
} catch (err) {
console.error(err);
}
}
}
}
run();
I am using the function below within a POST method. async/await is used, but in transferAmount, totalBalance is not updated when I call the function inside the POST route, and the return from the function is not correct. I need guidance so that it returns the object with the updated values.
async function transferAmount(fromAccountId, toAccountId, amount) {
const session = await mongoose.startSession();
const options= {session, new:true}
let sourceAccount, destinationAccount;
const BASICSAVNGS_MAX_BALANCE = 1500;
const result = {
newSrcBalance: 0,
totalDestBalance:0,
transferedAt:moment.now()
}
try {
session.startTransaction();
const source= await Account.findByIdAndUpdate(
{_id:sourceAccount._id},
{$inc:{balance:-amount}},
options
);
if(source.balance <0) {
// Source account should have the required amount for the transaction to succeed
const errorMessage='Insufficient Balance with Sender:';
throw new ErrorHandler(404,errorMessage);
}
const destination = await Account.findByIdAndUpdate(
{_id:destinationAccount._id},
{$inc:{balance:amount}},
options
);
// The balance in ‘BasicSavings’ account type should never exceed Rs. 50,000
if((destination.accountType.name === 'BasicSavings') && (destination.balance > BASICSAVNGS_MAX_BALANCE)) {
const errorMessage=`Recepient's maximum account limit reached`;
throw new ErrorHandler(404,errorMessage);
}
await session.commitTransaction();
result.transferedAt= moment.now() //*UPDATE THE TRANSFER TIMESTAMP
result.newSrcBalance = source.balance; //*UPDATE THE SOURCE BALANCE
session.endSession();
// finding total balance in destination account
await User.findById(destination.user.id, async function(err,user) {
if(err) {
const errorMessage=`Recepient not found!`;
console.log(err);
throw new ErrorHandler(404,errorMessage);
} else {
if(user.accounts) {
await Account.find({
'_id' :{$in:user.accounts}
}, function(err,userAccounts) {
totalDestBalance = userAccounts.reduce( (accumulator,obj) => accumulator+obj.balance,0);
result.totalDestBalance = totalDestBalance; //*UPDATE THE TOTAL BALANCE
console.log(result);
return result;
});
}
}
});
}
catch (error) {
// Abort transaction and undo any changes
await session.abortTransaction();
session.endSession();
throw new ErrorHandler(404,error);
} finally {
if(session) {
session.endSession();
}
}
}
module.exports = transferAmount;
The result of the above function is:
{
newSrcBalance: 940,
totalDestBalance: 1060,
transferedAt: 1594982541900
}
But inside the POST request below, it is {}:
const result = await transferAmount(fromAccountId, toAccountId, amount);
You are not returning anything from the function.
User.findById receives a callback, so its result never propagates out of transferAmount.
You can either convert it to async/await syntax or resolve the result with a promise.
Like below:
try {
const user = await User.findById(destination.user.id);
if (user.accounts) {
const userAccounts = await Account.find({ _id: { $in: user.accounts } });
totalDestBalance = userAccounts.reduce((accumulator, obj) => accumulator + obj.balance, 0);
result.totalDestBalance = totalDestBalance; //*UPDATE THE TOTAL BALANCE
console.log(result);
return result;
}
} catch (err) {
const errorMessage = `Recepient not found!`;
console.log(err);
throw new ErrorHandler(404, errorMessage);
}
Or:
return new Promise((resolve, reject) => {
User.findById(destination.user.id, async function(err, user) {
if (err) {
const errorMessage = `Recepient not found!`;
console.log(err);
reject(err);
} else {
if (user.accounts) {
await Account.find(
{
_id: { $in: user.accounts },
},
function(err, userAccounts) {
totalDestBalance = userAccounts.reduce((accumulator, obj) => accumulator + obj.balance, 0);
result.totalDestBalance = totalDestBalance; //*UPDATE THE TOTAL BALANCE
console.log(result);
resolve(result);
}
);
}
}
});
});
I may be wrong, but I cannot see a return statement in your transferAmount function.
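To make that concrete, here is a minimal sketch of how the tail of transferAmount could look with the lookup awaited and the result returned (same models as in the question; only the relevant portion is shown):
// ...after committing the transaction:
const user = await User.findById(destination.user.id);
const userAccounts = await Account.find({ _id: { $in: user.accounts } });
result.totalDestBalance = userAccounts.reduce(
    (accumulator, obj) => accumulator + obj.balance, 0
);
return result; // without this, `await transferAmount(...)` resolves to undefined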
In my Node Express app, I want to get all comments for a lesson and modify each comment by adding a fullname field. To get the full name of a user, I have defined a findFullNameById function in UserController.js. I use findAllCommentsByLessonId() in CourseController.js as follows. However, I always get an empty array. How can I use findFullNameById in findAllCommentsByLessonId() so that the fullname field gets added to each comment object?
CourseController.js
findAllCommentsByLessonId: async (courseId,lessonId,callback) => {
Course.find({_id: courseId}, function(err, course) {
if (err) {
callback(err, null);
} else {
const lesson = course[0].lessons.filter(l => l._id !== lessonId)
const comments = lesson[0].comments.map( async c => {
try{
const name = await UserController.findFullNameById(c.user)
return (
{
userId: c.user,
name: name,
content: c.content
}
)
}
catch(err){
console.log(err)
}
})
// console.log(comments) --> This always prints [{}]
callback(null, comments)
}
});
}
UserController.js
module.exports = {
findFullNameById: async (userId) => {
await User.find({_id: userId}, function(err, user) {
if (err) {
console.log(err)
} else {
return user[0].name+" "+( user[0].lname ? user[0].lname : "")
}
});
}
}
In CourseController.js, you can use either async/await or a callback.
The async/await way:
findAllCommentsByLessonId: async (courseId,lessonId) => {
let course = await Course.findOne({_id: courseId});
if (course){
let lesson = course.lessons.find(l => l._id == lessonId);
let comments = [];
for(let comment of lesson.comments){
let name = await UserController.findFullNameById(comment.user);
comments.push({userId: comment.user,name: name,content: comment.content});
}
return comments;
}else{
return [];
}
}
The callback way:
findAllCommentsByLessonId: (courseId,lessonId,callback) => {
Course.findOne({_id: courseId},function(err, course) {
if (err) {
callback(err, null);
} else {
let lesson = course.lessons.find(l => l._id == lessonId);
let comments = lesson.comments;
Promise.all(comments.map((comment)=>{
return UserController.findFullNameById(comment.user).then(name=>{
return {userId: comment.user,name: name,content: comment.content};
});
})).then((mappedComments)=>{
callback(null, mappedComments);
});
}
});
}
Start by dropping callbacks and actually using promises with await. You haven't specified what find is, but chances are it's a modern library that supports promises. So write:
async function findAllCommentsByLessonId(courseId, lessonId) {
const [course] = await Course.find({_id: courseId});
const lesson = course.lessons.find(l => l._id == lessonId);
const comments = lesson.comments.map(async c => {
const name = await UserController.findFullNameById(c.user)
return {
userId: c.user,
name,
content: c.content
};
});
}
module.exports.findFullNameById = async (userId) => {
const [user] = await User.find({_id: userId});
return user.name + " " + (user.lname ? user.lname : "");
};
Then you'll notice that comments is not an array of comment objects, but rather an array of promises for them, so wrap it in an await Promise.all(…) call.
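Concretely, the wrap would look something like this (a sketch under the same assumptions as the snippet above):
const comments = await Promise.all(lesson.comments.map(async c => {
    const name = await UserController.findFullNameById(c.user);
    return {
        userId: c.user,
        name,
        content: c.content
    };
}));
return comments; // now an array of plain objects, not promises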
As #SuleymanSah commented, I tried .populate and it worked well. I think this is the correct approach, for the exact reasons he pointed out. The following is how I did it:
Lesson.findOne({ _id: lessonId }).
populate('comments.user').
exec(function(err, lesson) {
if (err) {
console.log(err);
return callback(err, null);
}
if (!lesson) {
console.log("No record found");
return callback(err, null);
}
return callback(null, lesson.comments);
});
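One thing worth noting: populate('comments.user') only works if the user path in the comment subdocument schema declares a ref to the User model. A minimal sketch of that assumption (the schema shape here is illustrative):
const commentSchema = new mongoose.Schema({
    user: { type: mongoose.Schema.Types.ObjectId, ref: 'User' }, // ref enables populate
    content: String
});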
let getProjects = function() {
try {
return axios.get('https://app.asana.com/api/1.0/projects/')
} catch (error) {
console.error(error)
}
}
let getTasks = function(project) {
try {
return axios.get('https://app.asana.com/api/1.0/projects/'+project+'/tasks')
} catch (error) {
console.error(error)
}
}
async function getAsanaData() {
let projects = await getProjects()
projects = projects.data.data
projects.map(async (project) => {
//project.attachments = []
let tasks = await getTasks(project.gid)
if(tasks != undefined){
tasks = tasks.data.data
project.tasks = tasks
//console.log(projects)
}
})
console.log(projects)
return projects
}
Promise.try(() => {
return getAsanaData();
}).then((result) => {
//console.log(util.inspect(result, {showHidden: false, depth: null}))
//var asanaData = safeJsonStringify(result);
//fs.writeFile("thing.json", asanaData);
})
.catch(err=>console.log(err))
In getAsanaData(), projects has a new value after project.tasks = tasks.
However, its original value is printed by console.log(projects) before return projects.
This of course also means that the original value, rather than the necessary new value, will be returned.
What is the cause, and how do I resolve this?
Try this:
async function getAsanaData() {
let projects = await getProjects()
return Promise.all(projects.data.data.map(async (project) => {
let tasks = await getTasks(project.gid)
project.tasks = !!tasks ? tasks.data.data : project.tasks
return project
}))
}
This will go through the async calls to getTasks and return the project for each. Promise.all then gives you all of them resolved together.
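A quick usage sketch: because getAsanaData() now resolves only after every getTasks() call has settled, the caller sees the fully populated projects:
getAsanaData()
    .then((projects) => {
        console.log(projects); // each project now carries its tasks
    })
    .catch((err) => console.log(err));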
I have a function that runs periodically and updates the item.price of some documents in my Prices collection. The Prices collection has 100k+ items. The function looks like this:
//Just a helper function for multiple GET requests with request.
let _request = (urls, cb) => {
let results = {}, i = urls.length, c = 0;
handler = (err, response, body) => {
let url = response.request.uri.href;
results[url] = { err, response, body };
if (++c === urls.length) {
cb(results);
}
};
while (i--) {
request(urls[i], handler);
}
};
// function to update the prices in our Prices collection.
const update = (cb) => {
Price.remove({}, (err, remove) => {
if (err) {
return logger.error(`Error removing items...`);
}
logger.info(`Removed all items... Beginning to update.`);
_request(urls, (responses) => {
let url, response, gameid;
for (url in responses) {
id = url.split('/')[5].split('?')[0];
response = responses[url];
if (response.err) {
logger.error(`Error in request to ${url}: ${err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
logger.info(`Response body for ${id} is ${Object.keys(jsonResult).length}.`);
let allItemsArray = Object.keys(jsonResult).map((key, index) => {
return {
itemid: id,
hash_name: key,
price: jsonResult[key]
}
});
Price.insertMany(allItemsArray).then(docs => {
logger.info(`Saved docs for ${id}`)
}, (e) => {
logger.error(`Error saving docs.`);
});
}
}
if (cb && typeof cb == 'function') {
cb();
}
})
});
}
As you can see, to avoid iterating through 100k+ documents and updating each and every one of them separately, I delete them all at the beginning, call the API that gives me these items with prices, and use insertMany to insert all of them into my Prices collection.
This updating process will happen every 30 minutes.
But I just now realised: what if some user wants to check the prices while my Prices collection is empty because it's in the middle of updating itself?
The Question
So do I have to iterate through all of them in order to avoid deleting them? (Remember, there are MANY documents to be updated every 30 minutes.) Or is there another solution?
Here's a picture of how my Prices Collection looks (there are 100k docs like these, I just want to update the price property):
Update:
I have re-written my update function a bit and now it looks like this:
const update = (cb = null) => {
Price.remove({}, (err, remove) => {
if (err) {
return logger.error(`Error removing items...`);
}
logger.info(`Removed all items... Beginning to update.`);
_request(urls, (responses) => {
let url, response, gameid;
for (url in responses) {
gameid = url.split('/')[5].split('?')[0];
response = responses[url];
if (response.err) {
logger.error(`Error in request to ${url}: ${err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
logger.info(`Response body for ${gameid} is ${Object.keys(jsonResult).length}.`);
let allItemsArray = Object.keys(jsonResult).map((key, index) => {
return {
game_id: gameid,
market_hash_name: key,
price: jsonResult[key]
}
});
let bulk = Price.collection.initializeUnorderedBulkOp();
allItemsArray.forEach(item => {
bulk.find({market_hash_name: item.market_hash_name})
.upsert().updateOne(item);
});
bulk.execute((err, bulkers) => {
if (err) {
return logger.error(`Error bulking: ${e}`);
}
logger.info(`Updated Items for ${gameid}`)
});
// Price.insertMany(allItemsArray).then(docs => {
// logger.info(`Saved docs for ${gameid}`)
// }, (e) => {
// logger.error(`Error saving docs.`);
// });
}
}
if (cb && typeof cb == 'function') {
cb();
}
})
});
}
Notice the bulk variable now (thanks #Rahul), but now the collection takes ages to update. My processor is burning up and it literally takes 3+ minutes to update 60k+ documents. I honestly feel like the previous method, even though it deletes all of them and then reinserts them, is 10x faster.
Anyone?
From my experience (updating millions of Mongo docs on an hourly basis), here's a realistic approach to very large bulk updates:
1. Do all your API calls separately and write the results as BSON into a file.
2. Invoke mongoimport and import that BSON file into a new, empty collection prices_new. JavaScript, let alone high-level OO wrappers, is just too slow for that.
3. Rename prices_new -> prices with dropTarget=true (this is atomic, hence no downtime).
Schematically, it would look like this in JS:
let fname = '/tmp/data.bson';
let apiUrls = [...];
async function doRequest(url) {
// perform a request and return an array of records
}
let responses = await Promise.all(apiUrls.map(doRequest));
// if the data too big to fit in memory, use streams instead of this:
let data = flatMap(responses, BSON.serialize).join('\n');
await fs.writeFile(fname, data);
await child_process.exec(`mongoimport --collection prices_new --drop ${fname}`);
await db.prices_new.renameCollection('prices', true);
There's no need to clear the database and do a fresh insert. You can use the bulkWrite() method for this or use the updateMany() method to do the updates.
You can refactor the existing code to
const update = (cb) => {
_request(urls, responses => {
let bulkUpdateOps = [], gameid;
Object.keys(responses).forEach(url => {
let response = responses[url];
gameid = url.split('/')[5].split('?')[0];
if (response.err) {
logger.error(`Error in request to ${url}: ${response.err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
Object.keys(jsonResult).forEach(key => {
bulkUpdateOps.push({
"updateOne": {
"filter": { market_hash_name: key },
"update": { "$set": {
game_id: gameid,
price: jsonResult[key]
} },
"upsert": true
}
});
});
}
if (bulkUpdateOps.length === 1000) {
Price.bulkWrite(bulkUpdateOps).then(result => {
logger.info(`Updated Items`)
}).catch(e => logger.error(`Error bulking: ${e}`));
bulkUpdateOps = [];
}
});
if (bulkUpdateOps.length > 0) {
Price.bulkWrite(bulkUpdateOps).then(result => {
logger.info(`Updated Items`)
}).catch(e => logger.error(`Error bulking: ${e}`));
}
if (cb && typeof cb == 'function') {
cb();
}
});
}
I have not tested anything, but you can try this; it might be helpful. I am using the bluebird library for concurrency.
let _request = (url) => {
return new Promise((resolve, reject) => {
request(url, (err, response, body) => {
if (err) {
reject(err);
}
resolve(body);
});
});
};
const formatResponse = async (response) => {
// do stuff
return {
query: {}, // itemid: id,
body: {}
};
}
const bulkUpsert = (allItemsArray) => {
let bulk = Price.collection.initializeUnorderedBulkOp();
return new Promise((resolve, reject) => {
allItemsArray.forEach(item => {
bulk.find(item.query).upsert().updateOne(item.body);
});
bulk.execute((err, bulkers) => {
if (err) {
return reject(err);
}
return resolve(bulkers);
});
});
}
const getAndUpdateData = async (urls) => {
const allItemsArray = await Promise.all(urls.map(async (url) => {
const requestData = await _request(url); // you can make this also parallel
const formattedData = await formatResponse(requestData); // returns {query: {}, body: {}}
return formattedData;
}));
return bulkUpsert(allItemsArray);
};
function update() {
// split urls into chunks as per your need, 100/1000
var i, j, urlChunks = [],
chunk = 100;
for (i = 0, j = urls.length; i < j; i += chunk) {
urlChunks.push(urls.slice(i, i + chunk));
}
Bluebird.map(urlChunks, function (urlChunk) {
return getAndUpdateData(urlChunk);
}, {
concurrency: 1 // 1 = one chunk of 100 requests fetched and inserted into the db at a time
}).then(function () {
console.log("done");
}).catch(function () {
console.log("error");
});
}