How to approach this scenario in async programming /node js - javascript

I am writing a service in Node.js. Please read below for the approach I am taking to solve this problem.
First I call a REST endpoint (say /offers) to fetch data — let's call it cloudSenseData.
Final Response I need to massage/manipulate the data to give back the needed response as output.
During massaging the data(from above call) I have to check if there is a relatedProduct info present.
if present I need to call another rest endpoint(say /offers/:id/products)
the :id is catalogueitemid obtained in previous call(cloudSenseData) to get more relatedProduct info details which i can include in the final massaged output.
So lets say in cloudSenseData I have 10 catalogueItems.
These are the steps i am doing during massaging the data:
Using async.map on cloudSenseData and mapping it to the needed response format.(I used it to make things parallel done)and have a callback function doing the need
In the callback apart while making the response as needed I am checking if it has relatedProduct info
if it doesn't have one, there is no issue
else I am calling a downstream endpoint to get more relatedProductInfo using the catalogueItemId (here I am using deasync)
This is taking more time than needed.
Can anyone please suggest any alternatives to approach this?
Update with Code : common-nodejs is a library we have written that wraps many functionalities like calling the rest endpoints using restify, reading the app configuration and many such. The below code is in typescript .
Hope this helps.
import {log,serviceInfo,HttpMethod} from "common-nodejs";
import { CloudsenseBaasJsonAction } from './cloudsense-baas-connector';
import {Constants} from "./Constants";
let request = require('request');
let deasync = require('deasync');
let moment = require("moment");
let async= require("async");
let PropertiesReader = require("properties-reader");
let endpointsConfigFile = require("../../config/endpoints.json");
//load the properties file to map the cloudsense attributes with required keys.
let parseConfig=new PropertiesReader("config/fields_config.properties");
// helper method in adding more details in response by reading the properties file
// Maps a single Cloudsense attribute onto the microservice response, using the
// key translation loaded from config/fields_config.properties.
//
// attribute            - one entry of a Cloudsense `attributes` array
// microserviceResponse - response object being built (mutated in place)
// key / value          - property names under which the attribute stores its
//                        name and value (see Constants.Name / Constants.Value)
//
// Attributes with no mapping in the properties file are silently skipped.
export let parsePropertiesFile = function(attribute, microserviceResponse, key, value){
    let cloudSenseKey = attribute[key];
    let microServiceKey = parseConfig.get(cloudSenseKey);
    // `!= null` matches both null and undefined, so the previous double
    // check (`!= undefined && != null`) was redundant.
    if (microServiceKey != null) {
        microserviceResponse[microServiceKey] = attribute[value];
    }
};
// this method does the fetching the detailed info if relatedProducts are there
// Fetches detailed related-product info for one offer from the Cloudsense
// endpoint: GET /services/current/offer/:offerId/products. Returns the parsed
// JSON body synchronously.
//
// NOTE(review): deasync blocks the Node event loop for the full round trip of
// every call, which serialises the "parallel" async.map in the caller — this
// is the main source of the observed slowness. Prefer returning a Promise and
// letting callers await it; the synchronous interface is kept here only to
// preserve existing call sites.
export let requestRelatedProductsInfo = function(offerId):any{
    let body = {};
    let cloudsenseBaasJsonAction = new CloudsenseBaasJsonAction(HttpMethod.GET, body, '');
    let sendRequestForRelatedProducts = deasync(function(callback){
        request({
            proxy: serviceInfo.extras.internetProxy,
            url: serviceInfo.extras.serviceCloudsense.apiEndpoint + "/services/current/offer/" + offerId + "/products",
            method: endpointsConfigFile.cloudsense_baas.offers.method,
            headers: cloudsenseBaasJsonAction.modifyHeadersWithParams({
                "csTime": Date.now(),
                "method": HttpMethod[endpointsConfigFile.cloudsense_baas.offers.method],
                "path": "/services/current/offer/" + offerId + "/products",
                "clientKey": serviceInfo.extras.serviceCloudsense.clientKey,
                "clientSecret": serviceInfo.extras.serviceCloudsense.clientSecret
            })
        }, function (err, res, body) {
            // BUG FIX: check the transport error first — when err is set,
            // `res` is undefined and reading res.statusCode would throw.
            if (err) {
                // BUG FIX: the original fell through after the error branch
                // and invoked the callback a second time with (null, body).
                return callback(err, null);
            }
            if (res.statusCode == 404 || res.statusCode == 500) {
                console.log("********res***offerId*************", res.statusCode, offerId);
            }
            callback(null, body);
        });
    });
    // JSON.parse throws if the downstream returned a non-JSON error body; the
    // caller's try/catch in parseCloudData currently absorbs that.
    return JSON.parse(sendRequestForRelatedProducts());
}
// Massages raw Cloudsense offer data into the microservice response format
// shown in the comment below. When isDefaultFlow is false and a catalogue
// item carries relatedProducts, each item is enriched with details fetched
// from the downstream /offer/:id/products endpoint.
export class Parser {
/*
* This method is used to massage the cloudsense data and respond with the below format
*
* {
* "catalogueId": "a26O0000000SOS7IAO",
* "price": 1536,
* "name": "IPHONE 6S PLUS",
* "default": "true",
* "color": "Silver",
* "memory": "128GB",
* "contentId": "IPHONE6S128GBSILVER",
* "featured": "true",
* "isOutright": "Outright",
* "brand": "Apple",
* "startdate": "01-09-2016",
* "enddate": "01-09-2017",
* "OS": "iOS",
* "bluetick": true
* }
*
*
*/
public parseCloudsenseData(CloudsenseData:any,isDefaultFlow : boolean):any{
console.log('*******isDefaultFlow********',isDefaultFlow);
let current_Date = moment().format(Constants.DateFormate);
// async.map iteratee: maps one raw catalogue item (`result`) to the response
// shape and reports the outcome through `callback`.
let parseCloudData = function(result,callback){
try{
let microserviceResponse={
"catalogueId" : result.catalogueId,
"catalogueItemId" : result.catalogueItemId,
"outrightPrice" : result.cscfga__One_Off_Charge__c,
"displayName" : result.name,
"currentDate" : current_Date,
"recurringPrice" : result.cscfga__Recurring_Charge__c
};
let key = Constants.Name;
let value = Constants.Value;
//fetch the list of attributes and map each one onto the response via the
//properties-file key translation.
for(let att of result.attributes){
parsePropertiesFile(att,microserviceResponse,key,value);
}
// NOTE(review): leftover debugger statement — remove before production.
debugger;
//fetching the relatedProducts Data. if there are relatedProducts calling the endpoint to get more details
if(!isDefaultFlow && result.relatedProducts!= undefined && result.relatedProducts!=null && result.relatedProducts.length>0 ){
let microserviceRelatedProductArray=[];
// debugger;
// result.catalogueItemId = 'caf71d86-bca3-4bed-a2d5-b233305b8e76'
// NOTE(review): requestRelatedProductsInfo is deasync-backed, so this call
// blocks the event loop per item and serialises the "parallel" async.map —
// this is the main source of the slowness described in the question.
let relatedProductArray = requestRelatedProductsInfo(result.catalogueItemId);
for(let relatedProduct of relatedProductArray.results){
// for(let relatedProduct of relatedProductArray){
let finalRelatedProduct ={
"productId" : relatedProduct.productId,
"name" : relatedProduct.name,
"sku" : relatedProduct.sku,
"productType" : relatedProduct.productType,
"productSubType" : relatedProduct.productSubType,
"outrightPrice" : relatedProduct.cscfga__One_Off_Charge__c,
"recurringPrice" : relatedProduct.cscfga__Recurring_Charge__c,
"contentId" : '',
"mobileRepaymnetOption":''
};
//This loop is there to find the content_id among available attributes dynamically.
for(let att of relatedProduct.attributes){
parsePropertiesFile(att,finalRelatedProduct,key,value);
}
microserviceRelatedProductArray.push(finalRelatedProduct);
} // end of for loop.
// NOTE(review): microserviceResponse's inferred type has no relatedProducts
// property; TypeScript may flag this assignment — confirm compiler settings.
microserviceResponse.relatedProducts =microserviceRelatedProductArray;
}//end of if. ( view details flow).
// if(!isDefaultFlow && result.relatedProducts!= undefined && result.relatedProducts!=null && result.relatedProducts.length>0 ) {
// var catalogueItemIdArray = [];
// catalogueItemIdArray.push(result.catalogueId);
// }
return callback(null,microserviceResponse);
}catch(error){
// log.debug("************error block**********",error);
return callback(error,null);
}
};
let microServiceOutput;
//calling the parseCloudData method asynchronously for each element in the array.
// NOTE(review): returning microServiceOutput below only works because
// parseCloudData completes synchronously today (deasync). If any step becomes
// truly asynchronous, microServiceOutput will still be undefined at the
// `return` — confirm before refactoring to Promise-based downstream calls.
async.map(CloudsenseData.results, parseCloudData ,function (error,result){
if(error){
// console.log("***************Error***************",error);
microServiceOutput = {
"code":1005,
"message": "The downstream is not available"
};
return microServiceOutput;
}
microServiceOutput = result;
});
return microServiceOutput;
}//End of parseCloudsenseData();
}

Related

Execute promise or await with generated string variable

I am building a mongoose query and storing it in a variable call query. The code below shows it
let query = "Product.find(match)";
if (requestObject.query.sortBy) {
query = query.concat(".", "sort(sort)");
const parts = requestObject.query.sortBy.split(":");
sort[parts[0]] = parts[1] === "desc" ? -1 : 1;
}
if (requestObject.query.fields) {
query = query.concat(".", "select(fields)");
const fields = requestObject.query.fields.split(",").join(" ");
const items = await Product.find(match).sort(sort).select(fields); //.populate("category").exec();
/**const items = await Product.find(match).sort(sort).select("-__v"); //.populate("category").exec();**/
}
I am facing an issue when attempting to run a mongoose query that I have generated and stored in a string. When I run it in post man, the response is 200 but no data is returned. Below is a console.log(query) on line 2
what I hope to achieve is to have await or create a new promise execute the content id query variable like shown below
const items = new Promise((resolve) => resolve(query)); //.populate("category").exec();
items
? responseObject.status(200).json(items)
: responseObject
.status(400)
.json({ message: "Could not find products, please try again" });
I will appreciate it very much that and also if you can give me a better way of doing it, I will love that
This doesn't really make sense. You are building a string, not a query. You can't do anything with that string. (You could eval it, but you really shouldn't). Instead, build a query object!
let query = Product.find(match);
if (requestObject.query.sortBy) {
const [field, dir] = requestObject.query.sortBy.split(":");
const sort = {};
sort[field] = dir === "desc" ? -1 : 1;
query = query.sort(sort);
}
if (requestObject.query.fields) {
const fields = requestObject.query.fields.split(",");
query = query.select(fields);
}
//query.populate("category")
const items = await query.exec();
if (items) {
responseObject.status(200).json(items)
} else {
responseObject.status(400).json({ message: "Could not find products, please try again" });
}
If you really want to get that string for something (e.g. debugging), build it separately from the query:
let query = Product.find(match);
let queryStr = 'Product.find(match)';
if (requestObject.query.sortBy) {
const [field, dir] = requestObject.query.sortBy.split(":");
const sort = {[field]: dir === "desc" ? -1 : 1};
query = query.sort(sort);
queryStr += `.sort(${JSON.stringify(sort)})`;
}
if (requestObject.query.fields) {
const fields = requestObject.query.fields.split(",");
query = query.select(fields);
queryStr += `.select(${JSON.stringify(fields)})`;
}
//query.populate("category")
//queryStr += `.populate("category")`;
console.log(queryStr);
const items = await query.exec();
…

Having trouble console.log anything from this API, we're not allowed to use JQuery or Bootstrap, so I have to use fetch and bulma

so I've been having trouble trying to get anything to console.log from this API, the most I can get is a null or undefined return. I've tried just the base statement from the API company, and the code snippet runs for them but for some reason won't even return a console.log for me. not sure if it's a problem on their end or mine because it seems to run fine on their website. also as I said in the title, I have to use fetch
EDIT: I'm in coding school right now so I'm pretty new to all of this
EDIT: the dashboard shows that the API is being called, so why am I not getting any responses?
EDIT: I figured it out, the API wasn't working
// global variables
var countryInput = document.querySelector("#ipt-country");
var submitButton = document.querySelector("#submit-btn");
var currencyDisplay = document.querySelector("#currency-display");
// country currency array codes
var countryCurrencyArray =[ list off all countries with iso code for currency goes here, not going to list it bc its 270 lines long
];
// begins exchange rate function
// Reads the country name typed into #ipt-country, resolves its ISO currency
// code from countryCurrencyArray, then fetches the USD -> <currency> exchange
// rate from the RapidAPI currency-converter5 endpoint and renders it.
// Returns null (without fetching) on empty input or an unknown country.
function getExchangeRate() {
  var countrySearch = countryInput.value.toLowerCase().trim();
  var currencyUSA = "USD";
  // Guard clauses: nothing to do for empty or unrecognised input.
  if (countrySearch.length < 1) {
    return null;
  }
  var countryConversion = countryCurrencyArray.find(
    (element) => element.name === countrySearch
  );
  if (countryConversion === undefined) {
    return null;
  }
  var countryCurrency = countryConversion.code;
  // fetch API to change currency rate from American Dollar (USD) to selected currency
  fetch(
    "https://currency-converter5.p.rapidapi.com/currency/convert?" +
      "&from=" + currencyUSA +
      "&to=" + countryCurrency +
      "&amount=1",
    {
      "method": "GET",
      "headers": {
        "x-rapidapi-host": "currency-converter5.p.rapidapi.com",
        "x-rapidapi-key": "API-KEY"
      }
    }
  )
    .then(function (response) {
      console.log(response);
      // BUG FIX: the original first .then returned undefined, so the next
      // .then received undefined instead of the response and never worked.
      return response.json();
    })
    .then(function (data) {
      // NOTE(review): `data.response.countryCurrency` may not match the real
      // payload — countryCurrency is a local variable name, not a documented
      // response field. Verify the response shape against the API docs.
      for (var i = 0; i < data.response.countryCurrency.length; i++) {
        var exchangeMoney = document.createElement("div");
        exchangeMoney.classList.add(
          "is-flex-mobile",
          "column",
          "has-text-centered",
          "is-justify-content-space-evenly"
        );
        var exchangeOutput = document.createElement("div");
        // BUG FIX: the original referenced undeclared identifiers
        // (exchangeRateOutput / exchangeRateMoney) and threw a ReferenceError.
        exchangeOutput.classList.add("level-item");
        exchangeOutput.innerText = data.response.countryCurrency[i].code;
        exchangeMoney.appendChild(exchangeOutput);
        // Attach the new row to the page — the original never added it to the DOM.
        currencyDisplay.appendChild(exchangeMoney);
        console.log(exchangeOutput);
      }
    })
    .catch(function (err) {
      console.error("Request Failed", err);
    });
}
getExchangeRate();
console.log(getExchangeRate());
submitButton.addEventListener("click", getExchangeRate);

Firebase Cloud Function updating ref with incorrect values

I want to add a new node to the database if the node doesn't exist. I don't want to return anything to the client, I just want to update the database with the new values. On the client I have a listener that observes the credit_counts property, once the update happens it receives it there and notifies all users that this particular user has a new credit.
In the code below I check to see if (!snapshot.exists() and if it's not there I add the node to the database using admin.database().ref('/user_credits/{creditId}/{userId}').set({ dict });. After pasting the url I check the db and the layout is:
I'm a Swift developer. In Swift I can just do:
Database.database().reference().child("/user_credits/\(creditId)/\(userId)").setValue(dict) and the tree will be correct.
user_credits > {creditId} > {userId} > dict are incorrect. It should be user_credits > sample_123 > user_xyz > dict values. Where am I going wrong at?
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
exports.updateViewsCtAtPostsRef = functions.https.onRequest((request, response) => {
const currentTimeStamp = Date.now();
const receivedTimeStamp = admin.database.ServerValue.TIMESTAMP;
const creditId = "sample_123";
const userId = "userId_xyz";
admin.database().ref('user_credits').child(creditId).child(userId).once('value', snapshot => {
if (!snapshot.exists()) {
var dict = {
"joined_date": receivedTimeStamp,
"timeStamp": receivedTimeStamp,
"credits_count": 1
};
return admin.database().ref('/user_credits/{creditId}/{userId}').set({ dict });
} else {
const previousTimeStamp = snapshot.child("timeStamp").val();
const creditsCount = snapshot.child("credits_count").val();
if (previousTimeStamp + whatever) < currentTimeStamp {
let updatedCount = creditsCount + 1
return admin.database().ref('/user_credits/{creditId}/{userId}').update({ "timeStamp": receivedTimeStamp, "credits_count": updatedCount });
} else {
return true
}
}
});
});
I had to change the ref to:
return admin.database().ref('/user_credits/' + creditId + '/' + userId).set({ "joined_date": receivedTimeStamp, "timeStamp": receivedTimeStamp, "credits_count": 1 });
I also had to update the ref inside the else statement to follow the same format.
The syntax is fine, but the reference does not match the structure; that should rather be:
admin.database().ref('user_credits').child(creditId).child(userId).child('dict')
... else there won't be any snapshot.child("timeStamp") or snapshot.child("credits_count").

(node:31260) UnhandledPromiseRejectionWarning

In the function called "readPlcDataWriteToDB" I need to wait for the data to arrive before returning a response. How can I do that?
I am getting an error in this code about this problem. When I add "await", as in "newData = await data;", the result is the same — it has no effect.
Please help me for solve this. Tnx..
const asyncErrorWrapper = require("express-async-handler");
var nodes7 = require('nodes7'); // This is the package name, if the repository is cloned you may need to require 'nodeS7' with uppercase S
var conn = new nodes7;
const MachineState = require("../models/MachineState");
var newData;
var doneReading = false;
var doneWriting = false;
var variables = {
nMachine1 : 'DB7,BYTE0',
nMachine2 : 'DB7,BYTE1',
nMachine3 : 'DB7,BYTE2',
nMachine4 : 'DB7,BYTE3',
nMachine5 : 'DB7,BYTE4',
nMachine6 : 'DB7,BYTE5',
nMachine7 : 'DB7,BYTE6',
nMachine8 : 'DB7,BYTE7',
nMachine9 : 'DB7,BYTE8',
nMachine10 : 'DB7,BYTE9',
nMachine11 : 'DB7,BYTE10',
nMachine12 : 'DB7,BYTE11',
nMachine13 : 'DB7,BYTE12',
nMachine14 : 'DB7,BYTE13',
nMachine15 : 'DB7,BYTE14'
};
var data;
conn.initiateConnection({port: 102, host: '192.168.200.1', rack: 0, slot: 1}, connected); // slot 2 for 300/400, slot 1 for 1200/1500
// Connection callback for conn.initiateConnection. On failure (e.g. the PLC
// is unreachable) it logs the error and terminates the process; on success it
// registers the tag-name translation and the items to read.
function connected(err) {
  // `err` is a declared parameter, so a typeof-undefined check is unnecessary.
  if (err !== undefined) {
    console.log(err);
    process.exit();
  }
  // Translate friendly tag names (nMachine1..nMachine15) to DB7 byte addresses.
  conn.setTranslationCB(function (tag) { return variables[tag]; });
  // The item list is exactly the keys of `variables`; deriving it keeps the
  // two definitions from drifting apart (the original hand-listed all 15).
  conn.addItems(Object.keys(variables));
}
// Callback for conn.readAllItems: receives the latest PLC values.
// `anythingBad` is nodes7's bad-quality flag; `values` is the tag->value map.
function valuesReady(anythingBad, values) {
if (anythingBad) { console.log("SOMETHING WENT WRONG READING VALUES!!!!"); }
//console.log(values);
console.log("Done reading.");
doneReading = true;
if (doneWriting) { process.exit(); }
// Stash the values in the module-level `data` for readPlcDataWriteToDB.
data = values;
// NOTE(review): sendDataToDB is wrapped with asyncErrorWrapper and is typed
// as an Express handler (req, res, next); passing `values` here supplies it
// as `req`, which the handler never reads (it uses the module-level newData
// instead) — confirm this is intentional.
sendDataToDB(values);
}
// Express handler: reads all registered PLC items and responds with them.
// BUG FIX: conn.readAllItems is callback-based and returns nothing, so the
// original `await conn.readAllItems(valuesReady)` resolved immediately and
// `data`/`newData` were still undefined when the response was sent. Wrapping
// the callback in a Promise makes the await genuinely wait for the read.
const readPlcDataWriteToDB = asyncErrorWrapper(async (req, res, next) => {
  const values = await new Promise((resolve, reject) => {
    conn.readAllItems((anythingBad, vals) => {
      try {
        // Preserve the original side effects (logging, `data` assignment,
        // DB write) by funnelling through the existing valuesReady callback.
        valuesReady(anythingBad, vals);
      } catch (err) {
        return reject(err);
      }
      resolve(vals);
    });
  });
  newData = values;
  return res
    .status(200)
    .json({
      success: true,
      data: newData
    });
});
// Persists the latest PLC values (module-level `newData`) into the single
// MachineState document.
// NOTE(review): although wrapped as an Express handler, this is invoked from
// valuesReady as sendDataToDB(values) — the argument lands in `req` and is
// ignored; the function reads the module-level `newData` instead. Confirm
// `newData` is assigned before this runs.
const sendDataToDB = asyncErrorWrapper(async (req, res, next) => {
  let allMachineStates = await MachineState.findOne();
  // M1..M15 mirror nMachine1..nMachine15 one-to-one; a loop replaces the
  // fifteen hand-written assignments of the original.
  for (let i = 1; i <= 15; i++) {
    allMachineStates.Machine[`M${i}`] = newData[`nMachine${i}`];
  }
  await allMachineStates.save();
  console.log("PLC'den Alınan Verilere Göre Database Güncellendi");
});
module.exports = {
readPlcDataWriteToDB
};
enter image description here
Based on the documentation for nodes7, it appears that readAllItems does not return a Promise, but rather expects a callback. This means that await will not correctly wait for it, so the assignment to newData wouldn't work.
Either move the handling of newData to a callback, or try something like util.promisify to convert the library function to use Promises
It seems to me that you should put your await calls inside a try-catch structure. Like this:
try {
await myFunctionOne();
cons myConstOne = await myFunctionTwo();
.... whatever you need to put here ...
} catch(error) {
console.error(error);
}
From there you would get rid of the "Unhandled-Promise-Rejection" issue and you could see what is causing the problem.
Beside that, you may find useful these few tutorials about Async-Await, I myself learned from them and still sometimes refer to them.

Mongoose or mongo skip for pagination returns an empty array? [duplicate]

I am writing a webapp with Node.js and mongoose. How can I paginate the results I get from a .find() call? I would like a functionality comparable to "LIMIT 50,100" in SQL.
I'm am very disappointed by the accepted answers in this question. This will not scale. If you read the fine print on cursor.skip( ):
The cursor.skip() method is often expensive because it requires the server to walk from the beginning of the collection or index to get the offset or skip position before beginning to return result. As offset (e.g. pageNumber above) increases, cursor.skip() will become slower and more CPU intensive. With larger collections, cursor.skip() may become IO bound.
To achieve pagination in a scaleable way combine a limit( ) along with at least one filter criterion, a createdOn date suits many purposes.
MyModel.find( { createdOn: { $lte: request.createdOnBefore } } )
.limit( 10 )
.sort( '-createdOn' )
After taking a closer look at the Mongoose API with the information provided by Rodolphe, I figured out this solution:
MyModel.find(query, fields, { skip: 10, limit: 5 }, function(err, results) { ... });
Pagination using mongoose, express and jade - Here's a link to my blog with more detail
var perPage = 10
, page = Math.max(0, req.params.page)
Event.find()
.select('name')
.limit(perPage)
.skip(perPage * page)
.sort({
name: 'asc'
})
.exec(function(err, events) {
Event.count().exec(function(err, count) {
res.render('events', {
events: events,
page: page,
pages: count / perPage
})
})
})
You can chain just like that:
var query = Model.find().sort('mykey', 1).skip(2).limit(5)
Execute the query using exec
query.exec(callback);
In this case, you can add the query page and/ or limit to your URL as a query string.
For example:
?page=0&limit=25 // this would be added onto your URL: http:localhost:5000?page=0&limit=25
Since it would be a String we need to convert it to a Number for our calculations. Let's do it using the parseInt method and let's also provide some default values.
const pageOptions = {
page: parseInt(req.query.page, 10) || 0,
limit: parseInt(req.query.limit, 10) || 10
}
sexyModel.find()
.skip(pageOptions.page * pageOptions.limit)
.limit(pageOptions.limit)
.exec(function (err, doc) {
if(err) { res.status(500).json(err); return; };
res.status(200).json(doc);
});
BTW
Pagination starts with 0
You can use a little package called Mongoose Paginate that makes it easier.
$ npm install mongoose-paginate
After in your routes or controller, just add :
/**
* querying for `all` {} items in `MyModel`
* paginating by second page, 10 items per page (10 results, page 2)
**/
MyModel.paginate({}, 2, 10, function(error, pageCount, paginatedResults) {
if (error) {
console.error(error);
} else {
console.log('Pages:', pageCount);
console.log(paginatedResults);
}
}
Query:
search = productName
Params:
page = 1
// Pagination
// GET /search/:page — paginated product-name search.
// Query string: ?search=<productName>; URL param: page (1-based).
router.get("/search/:page", (req, res, next) => {
  const resultsPerPage = 5;
  const query = req.query.search;
  // Clamp the 1-based page param, then convert it to a 0-based multiplier
  // for the skip offset.
  const requestedPage = req.params.page >= 1 ? req.params.page : 1;
  const pageIndex = requestedPage - 1;
  Product.find({ name: query })
    .select("name")
    .sort({ name: "asc" })
    .limit(resultsPerPage)
    .skip(resultsPerPage * pageIndex)
    .then((results) => res.status(200).send(results))
    .catch((err) => res.status(500).send(err));
});
This is a example you can try this,
var _pageNumber = 2,
_pageSize = 50;
Student.count({},function(err,count){
Student.find({}, null, {
sort: {
Name: 1
}
}).skip(_pageNumber > 0 ? ((_pageNumber - 1) * _pageSize) : 0).limit(_pageSize).exec(function(err, docs) {
if (err)
res.json(err);
else
res.json({
"TotalCount": count,
"_Array": docs
});
});
});
Try using mongoose function for pagination. Limit is the number of records per page and number of the page.
var limit = parseInt(body.limit);
var skip = (parseInt(body.page)-1) * parseInt(limit);
db.Rankings.find({})
.sort('-id')
.limit(limit)
.skip(skip)
.exec(function(err,wins){
});
This is what I done it on code
var paginate = 20;
var page = pageNumber;
MySchema.find({}).sort('mykey', 1).skip((pageNumber-1)*paginate).limit(paginate)
.exec(function(err, result) {
// Write some stuff here
});
That is how I done it.
Simple and powerful pagination solution
/**
 * Cursor-style pagination: returns the newest documents, or the page that
 * follows a given document id.
 *
 * no_of_docs_required - page size (default 5)
 * last_doc_id         - _id of the last document of the previous page;
 *                       omit/empty for the first page
 */
async getNextDocs(no_of_docs_required: number = 5, last_doc_id?: string) {
    // Filter by _id only when a cursor id was supplied; otherwise fetch the
    // newest documents from the start.
    const criteria = last_doc_id ? { _id: { $lt: last_doc_id } } : {};
    return await MySchema.find(criteria)
        .sort({ _id: -1 })
        .limit(no_of_docs_required);
}
last_doc_id: the last document id that you get
no_of_docs_required: the number of docs that you want to fetch i.e. 5, 10, 50 etc.
If you don't provide the last_doc_id to the method, you'll get i.e. 5 latest docs
If you've provided the last_doc_id then you'll get the next i.e. 5 documents.
There are some good answers giving the solution that uses skip() & limit(), however, in some scenarios, we also need documents count to generate pagination. Here's what we do in our projects:
// Mongoose plugin that adds a `paginate` query helper. The helper runs the
// page query and a countDocuments query in parallel and returns
// { data, pagination } where pagination is { limit, page, count }.
//
// options.limit - default page size (falls back to 10)
// params        - { page, limit } as strings or numbers (e.g. req.query)
const PaginatePlugin = (schema, options = {}) => {
  schema.query.paginate = async function(params = {}) {
    const pagination = {
      limit: options.limit || 10,
      page: 1,
      count: 0
    }
    // parseInt with an explicit radix; NaN (missing/garbage input) falls
    // back to the defaults via || and the > 0 guard.
    pagination.limit = parseInt(params.limit, 10) || pagination.limit
    const page = parseInt(params.page, 10)
    pagination.page = page > 0 ? page : pagination.page
    const offset = (pagination.page - 1) * pagination.limit
    // Fetch the page of documents and the total count concurrently.
    const [data, count] = await Promise.all([
      this.limit(pagination.limit).skip(offset),
      this.model.countDocuments(this.getQuery())
    ]);
    pagination.count = count;
    return { data, pagination }
  }
}
mySchema.plugin(PaginatePlugin, { limit: DEFAULT_LIMIT })
// using async/await
const { data, pagination } = await MyModel.find(...)
.populate(...)
.sort(...)
.paginate({ page: 1, limit: 10 })
// or using Promise
MyModel.find(...).paginate(req.query)
.then(({ data, pagination }) => {
})
.catch(err => {
})
Here is a version that I attach to all my models. It depends on underscore for convenience and async for performance. The opts allows for field selection and sorting using the mongoose syntax.
var _ = require('underscore');
var async = require('async');
// Paginated find, attached as a static on a mongoose schema: runs the count
// query and the page query in parallel and reports via callback.
//
// filter - mongo filter object (copied, never mutated)
// opts   - { skip, limit, sort, fields } in mongoose syntax
//          (defaults: skip 0, limit 10)
// cb     - function (err, { totalCount, results })
function findPaginated(filter, opts, cb) {
  var defaults = { skip: 0, limit: 10 };
  opts = _.extend({}, defaults, opts);
  filter = _.extend({}, filter);
  var cntQry = this.find(filter);
  var qry = this.find(filter);
  if (opts.sort) {
    qry = qry.sort(opts.sort);
  }
  if (opts.fields) {
    qry = qry.select(opts.fields);
  }
  qry = qry.limit(opts.limit).skip(opts.skip);
  // Run the count and the page query concurrently.
  async.parallel(
    [
      function (cb) {
        cntQry.count(cb);
      },
      function (cb) {
        qry.exec(cb);
      }
    ],
    function (err, results) {
      if (err) return cb(err);
      // async.parallel delivers results in task order: [count, docs].
      // The original sniffed each element with loose equality (`==`) and a
      // redundant `else if (typeof(r) != 'number')`; direct indexing is
      // simpler and not fooled by unexpected types.
      cb(null, { totalCount: results[0], results: results[1] });
    }
  );
  return qry;
}
Attach it to your model schema.
MySchema.statics.findPaginated = findPaginated;
The above answers hold good.
Just an add-on for anyone who prefers async/await rather than
promises!
/**
 * Returns the first page of Foo documents (hard-coded page 1, 10 per page)
 * and exposes the total record count through the `max-records` response
 * header. Responds 500 with the error on failure.
 */
const findAllFoo = async (req, resp, next) => {
  const pageSize = 10;
  const currentPage = 1;
  try {
    // skip past the earlier pages, then cap the page size
    const skipCount = pageSize * (currentPage - 1);
    const foos = await FooModel.find()
      .skip(skipCount)
      .limit(pageSize);
    // total number of documents for this model
    const numberOfFoos = await FooModel.countDocuments();
    resp.setHeader('max-records', numberOfFoos);
    resp.status(200).json(foos);
  } catch (err) {
    resp.status(500).json({
      message: err
    });
  }
};
you can use the following line of code as well
per_page = parseInt(req.query.per_page) || 10
page_no = parseInt(req.query.page_no) || 1
var pagination = {
limit: per_page ,
skip:per_page * (page_no - 1)
}
users = await User.find({<CONDITION>}).limit(pagination.limit).skip(pagination.skip).exec()
this code will work in latest version of mongo
A solid approach to implement this would be to pass the values from the frontend using a query string. Let's say we want to get page #2 and also limit the output to 25 results.
The query string would look like this: ?page=2&limit=25 // this would be added onto your URL: http:localhost:5000?page=2&limit=25
Let's see the code:
// We would receive the values with req.query.<<valueName>> => e.g. req.query.page
// Since it would be a String we need to convert it to a Number in order to do our
// necessary calculations. Let's do it using the parseInt() method and let's also provide some default values:
const page = parseInt(req.query.page, 10) || 1; // getting the 'page' value
const limit = parseInt(req.query.limit, 10) || 25; // getting the 'limit' value
const startIndex = (page - 1) * limit; // this is how we would calculate the start index aka the SKIP value
const endIndex = page * limit; // this is how we would calculate the end index
// We also need the 'total' and we can get it easily using the Mongoose built-in **countDocuments** method
const total = await <<modelName>>.countDocuments();
// skip() will return a certain number of results after a certain number of documents.
// limit() is used to specify the maximum number of results to be returned.
// Let's assume that both are set (if that's not the case, the default value will be used for)
query = query.skip(startIndex).limit(limit);
// Executing the query
const results = await query;
// Pagination result
// Let's now prepare an object for the frontend
const pagination = {};
// If the endIndex is smaller than the total number of documents, we have a next page
if (endIndex < total) {
pagination.next = {
page: page + 1,
limit
};
}
// If the startIndex is greater than 0, we have a previous page
if (startIndex > 0) {
pagination.prev = {
page: page - 1,
limit
};
}
// Implementing some final touches and making a successful response (Express.js)
const advancedResults = {
success: true,
count: results.length,
pagination,
data: results
}
// That's it. All we have to do now is send the `results` to the frontend.
res.status(200).json(advancedResults);
I would suggest implementing this logic into middleware so you can be able to use it for various routes/ controllers.
You can do using mongoose-paginate-v2. For more info click here
const mongoose = require('mongoose');
const mongoosePaginate = require('mongoose-paginate-v2');
const mySchema = new mongoose.Schema({
// your schema code
});
mySchema.plugin(mongoosePaginate);
const myModel = mongoose.model('SampleModel', mySchema);
myModel.paginate().then({}) // Usage
I have found a very efficient way and implemented it myself, I think this way is the best for the following reasons:
It does not use skip, which time complexity doesn't scale well;
It uses IDs to query the document. Ids are indexed by default in MongoDB, making them very fast to query;
It uses lean queries, these are known to be VERY performative, as they remove a lot of "magic" from Mongoose and returns a document that comes kind of "raw" from MongoDB;
It doesn't depend on any third party packages that might contain vulnerabilities or have vulnerable dependencies.
The only caveat to this is that some methods of Mongoose, such as .save() will not work well with lean queries, such methods are listed in this awesome blog post, I really recommend this series, because it considers a lot of aspects, such as type security (which prevents critical errors) and PUT/ PATCH.
I will provide some context, this is a Pokémon repository, the pagination works as the following: The API receives unsafeId from the req.body object of Express, we need to convert this to string in order to prevent NoSQL injections (it could be an object with evil filters), this unsafeId can be an empty string or the ID of the last item of the previous page, it goes like this:
/**
 * @description GET All with pagination, will return 200 in success
 * and receives the last ID of the previous page or undefined for the first page.
 * Note: You should take care, read and consider about Off-By-One error.
 * @param {string|undefined|unknown} unsafeId - An entire page that comes after this ID will be returned
 * @returns {Promise<Object[]>} up to 15 lean (plain-object) documents
 * @throws {InternalServerError} for any error our code did not raise deliberately
 */
async readPages(unsafeId) {
try {
// Coerce to string so a caller cannot smuggle in an object carrying
// query operators (NoSQL injection); empty string means "first page".
const id = String(unsafeId || '');
let criteria;
if (id) {
// Page = everything strictly after the last ID of the previous page.
criteria = {_id: {$gt: id}};
} // else criteria is undefined
// This query looks a bit redundant on `lean`, I just really wanted to make sure it is lean
const pokemon = await PokemonSchema.find(
criteria || {},
).setOptions({lean: true}).limit(15).lean();
// This would throw on an empty page; kept disabled so a request past the
// last page returns an empty array with 200 instead of an error.
// if (pokemon.length < 1) {
// throw new PokemonNotFound();
// }
return pokemon;
} catch (error) {
// In this implementation, any error that is not defined by us
// will not return on the API to prevent information disclosure.
// Our errors carry `returnErrorResponse`, which indicates
// that no sensitive information is contained within the object.
if (error.returnErrorResponse) {
throw error;
} // else
console.error(error.message);
throw new InternalServerError();
}
}
Now, to consume this and avoid Off-By-One errors in the frontend, you do it like the following, considering that pokemons is the Array of Pokémons documents that are returned from the API:
// Page zero
const pokemons = await fetchWithPagination({'page': undefined});
// Page one
// You can also use a fixed number of pages instead of `pokemons.length`
// But `pokemon.length` is more reliable (and a bit slower)
// You will have trouble with the last page if you use it with a constant
// predefined number
// `.at(-1)?._id` avoids a TypeError on an empty first page: the original
// `pokemons[pokemons.length - 1]._id` dereferenced `undefined` before the
// guard below could run.
const id = pokemons.at(-1)?._id;
if (!id) {
  throw new Error('Last element from page zero has no ID');
} // else
const page2 = await fetchWithPagination({'page': id});
As a note here, Mongoose ObjectIds are roughly time-ordered — they embed a creation timestamp in their leading bytes — so a newer ID will compare greater than an older one; that is the foundation of this answer.
This approach has been tested against Off-By-One errors; for instance, the last element of a page could be returned as the first element of the following one (duplicated), or an element sitting between the last item of the previous page and the first of the current page might disappear.
When you are done with all the pages and request a page after the last element (one that does not exist), the response will be an empty array with 200 (OK), which is awesome!
The easiest and fastest way is to paginate with the ObjectId.
Example;
Initial load condition
condition = {limit:12, type:""};
Take the first and last ObjectId from response data
Page next condition
condition = {limit:12, type:"next", firstId:"57762a4c875adce3c38c662d", lastId:"57762a4c875adce3c38c6615"};
Page next condition
condition = {limit:12, type:"next", firstId:"57762a4c875adce3c38c6645", lastId:"57762a4c875adce3c38c6675"};
In mongoose
// Build an ObjectId-anchored next/prev page query from the request body.
var condition = {};
var sort = { _id: 1 };
if (req.body.type == "next") {
  // Next page: everything after the last ID of the current page.
  condition._id = { $gt: req.body.lastId };
} else if (req.body.type == "prev") {
  // Previous page: walk backwards from the first ID of the current page.
  sort = { _id: -1 };
  condition._id = { $lt: req.body.firstId };
}
var query = Model.find(condition, {}, { sort: sort }).limit(req.body.limit);
query.exec(function (err, properties) {
  // The original response line was a syntax error (`{ "result": result)`)
  // and referenced an undefined `result`; the callback argument is `properties`.
  if (err) {
    return res.status(500).json({ "error": err });
  }
  return res.json({ "result": properties });
});
The best approach (IMO) is to use skip and limit BUT within a limited collections or documents.
To make the query within limited documents, we can use specific index like index on a DATE type field. See that below
// Skip/limit pagination constrained to a date window so the skip never
// scans the whole collection.
let page = ctx.request.body.page || 1
let size = ctx.request.body.size || 10
let DATE_FROM = ctx.request.body.date_from
let DATE_TO = ctx.request.body.date_to
var start = (parseInt(page, 10) - 1) * parseInt(size, 10)
// Bounds were inverted in the original ({$lte: DATE_FROM, $gte: DATE_TO}):
// "from" is the lower bound and "to" is the upper bound of the window.
let result = await Model.find({ created_at: { $gte: DATE_FROM, $lte: DATE_TO } })
  .sort({ _id: -1 })
  .select('<fields>')
  .skip(start)
  .limit(size)
  // `.exec(callback)` was dropped: `callback` was never defined, and the
  // promise is already consumed by `await`.
  .exec()
The easiest plugin for pagination:
https://www.npmjs.com/package/mongoose-paginate-v2
Add plugin to a schema and then use model paginate method:
var mongoose = require('mongoose');
var mongoosePaginate = require('mongoose-paginate-v2');
var mySchema = new mongoose.Schema({
  /* your schema definition */
});
mySchema.plugin(mongoosePaginate);
var myModel = mongoose.model('SampleModel', mySchema);
// Usage: the original called `.then({})`, passing an object where a callback
// function belongs, so the resolved page was silently discarded.
myModel.paginate({}, { page: 1, limit: 10 }).then((result) => {
  // result.docs holds the page of documents
});
// Skip/limit pagination that also reports prev/next page metadata.
let page, limit, skip, lastPage, query, counts, results;
page = req.params.page * 1 || 1;           // page number from the client, defaults to 1
limit = req.params.limit * 1 || 1;         // page size from the client, defaults to 1
skip = (page - 1) * limit;                 // number of documents to skip
lastPage = page * limit;                   // index just past this page's last document
counts = await userModel.countDocuments(); // total documents in the collection
// The original assigned `query = query.skip(...)` while `query` was still
// undefined; the chain has to start from a find() on the model.
query = userModel.find().skip(skip).limit(limit);
const paginate = {};
// A previous page exists whenever we skipped something.
if (skip > 0) {
  paginate.prev = {
    page: page - 1,
    limit: limit
  };
}
// A next page exists while documents remain past this page.
// (The original nested this block inside the `prev` one and never closed
// either brace, so page 1 could never report a `next` page.)
if (lastPage < counts) {
  paginate.next = {
    page: page + 1,
    limit: limit
  };
}
results = await query; // the final page of results
// Derive skip/limit paging values from the query string.
// `* 1` coerces the string params to numbers; `|| 1` / `|| 1000` supply
// defaults (this also replaces an explicit 0, which is fine for 1-based pages;
// a non-numeric param coerces to NaN and likewise falls back to the default).
const page = req.query.page * 1 || 1;
const limit = req.query.limit * 1 || 1000;
const skip = (page - 1) * limit;
// NOTE(review): `query` must already hold a mongoose Query at this point —
// this fragment only appends the paging stages to an existing chain.
query = query.skip(skip).limit(limit);
This is example function for getting the result of skills model with pagination and limit options
// Returns one page of Skills plus the total count.
// Expects req.body.page (1-based) and req.body.size (items per page).
export function get_skills(req, res) {
  console.log('get_skills');
  var page = req.body.page; // 1 or 2
  var size = req.body.size; // 5 or 10 per page
  var query = {};
  if (page < 0 || page === 0) {
    // `result` was an undeclared (implicit global) in the original.
    var result = {'status': 401,'message':'invalid page number,should start with 1'};
    return res.json(result);
  }
  query.skip = size * (page - 1);
  query.limit = size;
  Skills.count({}, function (err1, tot_count) { // to get the total count of skills
    if (err1) {
      res.json({
        status: 401,
        message: 'something went wrong!',
        // The original sent `err: err`, but `err` is undefined in this
        // scope — the count error is `err1`.
        err: err1,
      });
    } else {
      Skills.find({}, {}, query).sort({'name': 1}).exec(function (err, skill_doc) {
        if (!err) {
          res.json({
            status: 200,
            message: 'Skills list',
            // The original sent `data: data`, but `data` is undefined —
            // the page of documents is the callback argument `skill_doc`.
            data: skill_doc,
            tot_count: tot_count,
          });
        } else {
          res.json({
            status: 401,
            message: 'something went wrong',
            err: err
          });
        }
      }); // Skills.find end
    }
  }); // Skills.count end
}
Using ts-mongoose-pagination
const trainers = await Trainer.paginate(
{ user: req.userId },
{
perPage: 3,
page: 1,
select: '-password, -createdAt -updatedAt -__v',
sort: { createdAt: -1 },
}
)
return res.status(200).json(trainers)
Below Code Is Working Fine For Me.
You can add find filters as well; use the same filters in the count query to get accurate results.
// Pages through `yourModel` and responds with the docs, the current page,
// and the total page count.
export const yourController = async (req, res) => {
  const { body } = req;
  // The original `var perPage = body.limit,` followed by another `var` was a
  // SyntaxError, and `Math.max(0, body.page)` allowed page 0, which makes
  // `skip` negative. Clamp to page 1 instead.
  const perPage = body.limit;
  const page = Math.max(1, body.page);
  yourModel
    .find() // You Can Add Your Filters inside
    .limit(perPage)
    .skip(perPage * (page - 1))
    .exec(function (err, dbRes) {
      yourModel.count().exec(function (err, count) { // You Can Add Your Filters inside
        res.send(
          JSON.stringify({
            Articles: dbRes,
            page: page,
            // ceil so a partial final page still counts as a full page
            pages: Math.ceil(count / perPage),
          })
        );
      });
    });
};
You can write query like this.
// Fetch one page of articles: skip the previous pages, cap at per_page.
mySchema
  .find()
  .skip((page - 1) * per_page)
  .limit(per_page)
  .exec((err, articles) => {
    // Guard clause: surface the query error to the client and stop.
    if (err) {
      return res.status(400).send({
        message: err
      });
    }
    res.json(articles);
  });
page : page number coming from client as request parameters.
per_page : no of results shown per page
If you are using MEAN stack following blog post provides much of the information to create pagination in front end using angular-UI bootstrap and using mongoose skip and limit methods in the backend.
see : https://techpituwa.wordpress.com/2015/06/06/mean-js-pagination-with-angular-ui-bootstrap/
You can either use skip() and limit(), but it's very inefficient. A better solution would be a sort on indexed field plus limit().
We at Wunderflats have published a small lib here: https://github.com/wunderflats/goosepage
It uses the first way.
If you are using mongoose as a source for a restful api have a look at
'restify-mongoose' and its queries. It has exactly this functionality built in.
Any query on a collection provides headers that are helpful here
test-01:~$ curl -s -D - localhost:3330/data?sort=-created -o /dev/null
HTTP/1.1 200 OK
link: </data?sort=-created&p=0>; rel="first", </data?sort=-created&p=1>; rel="next", </data?sort=-created&p=134715>; rel="last"
.....
Response-Time: 37
So basically you get a generic server with a relatively linear load time for queries to collections. That is awesome and something to look at if you want to go into a own implementation.
// Renders one page of posts; fetches all documents and slices in memory.
app.get("/:page", (req, res) => {
  post.find({}).then((data) => {
    const per_page = 5;
    const num_page = Number(req.params.page);
    const max_pages = Math.ceil(data.length / per_page);
    // Validate explicitly: Number("abc") is NaN, which slipped through the
    // original `== 0` / `> max_pages` checks and rendered a broken page.
    if (!Number.isInteger(num_page) || num_page < 1 || num_page > max_pages) {
      res.render('404');
    } else {
      const starting = per_page * (num_page - 1);
      const ending = per_page + starting;
      res.render('posts', { posts: data.slice(starting, ending), pages: max_pages, current_page: num_page });
    }
  }).catch((err) => {
    // The original promise chain had no rejection handler; a query failure
    // would have been an unhandled rejection.
    console.error(err);
    res.status(500).render('404');
  });
});

Categories

Resources