I am currently doing a GET request that uses a bunch of methods I wrote to query the database and display the results. This works great, but I want to make it a POST request so that the methods do not have to depend on req.query and can deal with a JSON body instead of a string of URL params. That way the URL would contain nothing except the endpoint, keeping it as clean and dynamic as possible. Any idea how to do this?
This is my controller method:
exports.getBooks = async (req, res, next) => {
const bookList = new BookList(Book.find(), req.query)
.filter()
.sort()
.paginate();
const books = await bookList.query;
res.status(200)
.json({
books,
});
};
This is the BookList class that has all the methods:
/**
 * Chainable builder that applies filtering, sorting and pagination from a
 * plain key/value object (e.g. req.query or req.body) onto a Mongoose query.
 * Every method returns `this` so calls can be chained; the caller awaits the
 * final query via the `query` property.
 */
class BookList {
  /**
   * @param {Object} query - the base query, e.g. Book.find().
   * @param {Object} queryString - plain object of filter/sort/page params.
   */
  constructor(query, queryString) {
    this.query = query;
    this.queryString = queryString;
  }

  // Applies every remaining key/value pair as an equality filter, after
  // dropping the reserved pagination/sorting keys. The original copied the
  // object, deleted keys in a loop, then round-tripped it through
  // JSON.stringify/JSON.parse — a no-op for query-string data; destructuring
  // with rest does the same job directly.
  filter() {
    const { page, sort, limit, fields, ...filters } = this.queryString;
    this.query = this.query.find(filters);
    return this;
  }

  // "a,b" becomes "a b" (Mongoose multi-field sort); newest first by default.
  sort() {
    if (this.queryString.sort) {
      const sortBy = this.queryString.sort.split(',').join(' ');
      this.query = this.query.sort(sortBy);
    } else {
      this.query = this.query.sort('-createdAt');
    }
    return this;
  }

  // page/limit arrive as strings from the query string; coerce explicitly.
  // Defaults: page 1, limit 100.
  paginate() {
    const page = Number(this.queryString.page) || 1;
    const limit = Number(this.queryString.limit) || 100;
    const skip = (page - 1) * limit;
    this.query = this.query.skip(skip).limit(limit);
    return this;
  }
}
// Expose the builder for controllers (CommonJS export).
module.exports = BookList;
This is what worked for me:
exports.getBooks = async (req, res, next) => {
let bookBody = req.body
const bookList = new BookList(Book.find(), req.query)
.filter(bookBody, req)
.sort()
.paginate();
const books = await bookList.query;
res.status(200)
.json({
books,
});
};
filter(bookBody, req) {
const filterBooks = bookBody.filter
const bookId = req.params.bookId
let requiredFilter
if (filterBooks) {
requiredFilter = {bookStatus: filterBooks.bookStatus, bookId};
} else {
requiredFilter = { bookId}
}
this.query = this.query.find(requiredFilter)
return this;
}
If you need to convert it to POST with json body then you will use something like below
var bodyParser = require('body-parser')
// parse application/json
app.use(bodyParser.json())
exports.getBooks = async (req, res, next) => {
const bookList = new BookList(Book.find(), req.body)
.filter()
.sort()
.paginate();
const allReports = await bookList.query;
res.status(200)
.json({
books,
});
};
In this I assumed that the parameter names will still be the same. Also, the two lines for the JSON parser need to be in the file where you initialise the Express server.
When using an Asynchronous JavaScript and XML (AJAX) request to send data to and receive data back from the server, it is possible to specify either a "POST" or a "GET" request. Any number of items may be sent or received via a single such request, as shown in the sample script below. In your case, you would tailor it to have the script send your JSON data.
/**
 * Template: sends form values to the form's `action` URL via XMLHttpRequest
 * and writes the server's response back into the page. Keep whichever of the
 * "multiple values" / "single value" response branches applies to your case.
 * Fixed from the original: strict equality in the readyState check and
 * consistent statement terminators.
 */
function AJAXroutine(val1, val2) { /* NAME IT WHATEVER YOU LIKE */
  var formAction = document.getElementById('MyForm').getAttribute('action');
  function Q() {
    var K = new XMLHttpRequest();
    var frm = new FormData();
    /* CAN HAVE ANY NUMBER OF VALUES SENT BACK TO SERVER */
    frm.append('V1', val1);
    frm.append('V2', val2);
    frm.append('V3', document.getElementById('another_elmt').value);
    frm.append('V4', document.getElementById('another_one').value);
    K.onreadystatechange = function () {
      // 4 === XMLHttpRequest.DONE; use strict comparison.
      if (this.readyState === 4 && this.status === 200) {
        /* IF MULTIPLE VALUES ARE RETURNED */
        var data = this.responseText.split('|');
        // NOTE(review): assigning server text to innerHTML injects markup —
        // prefer textContent unless the response is trusted HTML.
        document.getElementById('my_elmt').innerHTML = data[0];
        document.getElementById('my_text').value = data[1];
        /* ETC. */
        /* IF JUST ONE VALUE IS RETURNED */
        document.getElementById('my_elmt').value = this.responseText;
      }
    };
    /* CAN OPTIONALLY CHANGE 'POST' TO 'GET' */
    K.open('post', formAction);
    K.send(frm);
  }
  Q();
}
This is a very basic template for an AJAX request and can be easily adapted to suit one's individual requirements.
I am trying to retrieve all the documents from a MongoDB cluster. I have followed code I've seen online, however I am facing a small problem.
// Question code: tries to export an array that is filled asynchronously.
// NOTE(review): `require('mongodb')` returns the module object; MongoClient
// is normally obtained via `require('mongodb').MongoClient` — confirm.
const MongoClient = require('mongodb');
// NOTE(review): the '#' before the host looks like a garbled '@' separator.
const uri = "mongodb+srv://<user>:<password>#cluster0-10soy.mongodb.net/test?retryWrites=true&w=majority";
var questionsArray = [];
MongoClient.connect(uri, function (err, client) {
const database = client.db("WhatSportWereYouMadeFor");
database.collection("Questions").find({}, (error, cursor) =>{
// cursor.each delivers items asynchronously; the export at the bottom runs
// long before any item is pushed — the problem being asked about.
cursor.each(function(error, item){
if (item == null){
console.log(error);
}
// NOTE(review): the null end-of-cursor marker is also pushed here.
questionsArray.push(item);
});
})
});
// Exported immediately: consumers see an empty array until the callbacks fire.
module.exports = { questionsArray };
I connect fine to the database, however I've set a breakpoint at the stop variable and that gets hit before any of the documents retrieved from the database get pushed to the questions array.
I've also tried wrapping the code inside an async function and then awaiting it before the stop variable, but still that breakpoint gets hit first and only after the documents get pushed to the array.
What I would do is wrap the whole thing in a promise, and then export that.
// Wraps connect + query + cursor iteration in a Promise so callers can await
// the fully-populated array instead of importing a still-empty one.
const MyExport = () => {
  return new Promise((resolve, reject) => {
    var questionsArray = [];
    MongoClient.connect(uri, function (err, client) {
      if (err) {
        reject(err);
        return;
      }
      const database = client.db("WhatSportWereYouMadeFor");
      database.collection("Questions").find({}, (error, cursor) => {
        if (error) {
          reject(error);
          return;
        }
        cursor.each(function (error, item) {
          if (error) {
            reject(error);
            return;
          }
          // cursor.each signals completion with a null item. Only resolve
          // then — the original resolved synchronously, right after kicking
          // off the iteration, so the promise fulfilled with an empty array.
          if (item == null) {
            resolve(questionsArray);
            return;
          }
          questionsArray.push(item);
        });
      });
    });
  });
};

module.exports.questionsArray = MyExport;
But then when you import it, you need to run and await it
// Import the exported factory, then invoke and await it to get the populated
// array. (Fixed the `cosnt` typo, which was a syntax error.)
const questionsArrayFunc = require("path/to/this/file").questionsArray;
const questionsArray = await questionsArrayFunc();
I hope this is what you are looking for. There might be some other way, but I think this works.
So I have two for loops, one nested inside the other, but the outer loop appears to run through all of its iterations and print its results before the nested loop produces any output. How could I make them run in a sequential (synchronous-looking) order?
For example, all the topicData gets printed in a row instead of printing one topicData and moving on to the nested for loop.
I'm not sure if this is the proper way to implement the async await. Any pointers would be appreciated. Thanks
// Question code (truncated in the original post): lists SNS topics, then for
// each topic lists its subscriptions and writes a record to DynamoDB. The
// inner work is callback-driven, so iterations overlap instead of running one
// after another — which is why all topic logs print before any endpoint logs.
exports.create = (event, context, callback) => {
var topicTimestamp = "";
var endpoint = "";
// Callback-style AWS call; marking the callback `async` does NOT make the
// loop below wait for the inner listSubscriptionsByTopic callbacks.
sns.listTopics(async function(err, data) {
if (err) {
console.log(err, err.stack);
} else {
console.log(data);
for (var topic in data.Topics){ //first loop
//var topicData = "";
//retrieve each topic and append to topicList if it is lakeview topic
// NOTE(review): awaiting a plain (non-Promise) value is a no-op.
var topicData = await data.Topics[topic].TopicArn;
topicTimestamp = topicData.slice(22, 34); //get only the topic createdAt
var params = {
TopicArn: topicData //topicData
};
console.log("SUBS per" + params.TopicArn);
//retrieve subscriptions attached to each topic
// Fires asynchronously for every topic before any earlier call completes.
sns.listSubscriptionsByTopic(params, async function(err, subscriptionData){
console.log(subscriptionData);
//console.log("SUBS per" + params.TopicArn);
if (err) {
console.log(err, err.stack); // an error occurred
} else {
var endpointList = [];
for (var sub in subscriptionData.Subscriptions) { //nested loop
// NOTE(review): another no-op await on a plain value.
endpoint = await subscriptionData.Subscriptions[sub].Endpoint;
console.log("ENDPOINT:: " + endpoint);
endpointList.push(endpoint);
}
} // end of else listSub
//put topic info into table
var topicsParams = {
TableName: tableName,
Item: {
id: uuidv4(),
createdAt: timestamp,
topicCreatedAt: topicTimestamp,
topic: topicData,
phoneNumbers: endpointList
},
};
endpointList = []; //reset to empty array
// Snippet truncated in the original post.
dynamoDb.put(topicsParams, (error) => {...}
There are couple of issues here
You are trying to do callback style code in loops while you have promise methods available.
You could also do things in parallel using promise.all
Because of callback style the code is very complicated
You are awaiting where it is not required. For example in the callback
You can try to use this way
exports.create = async (event, context, callback) => {
try {
let topicTimestamp = "";
let endpoint = "";
const data = await sns.listTopics().promise();
// eslint-disable-next-line guard-for-in
for (const topic in data.Topics) { // first loop
// var topicData = "";
// retrieve each topic and append to topicList if it is lakeview topic
const topicData = data.Topics[topic].TopicArn;
topicTimestamp = topicData.slice(22, 34); // get only the topic createdAt
const params = {
"TopicArn": topicData // topicData
};
console.log(`SUBS per${ params.TopicArn}`);
const subscriptionData = await sns.listSubscriptionsByTopic(params).promise();
const endpointList = [];
// eslint-disable-next-line guard-for-in
for (const sub in subscriptionData.Subscriptions) { // nested loop
endpoint = subscriptionData.Subscriptions[sub].Endpoint;
console.log(`ENDPOINT:: ${ endpoint}`);
endpointList.push(endpoint);
}
// put topic info into table
const topicsParams = {
"TableName": tableName,
"Item": {
"id": uuidv4(),
"createdAt": timestamp,
"topicCreatedAt": topicTimestamp,
"topic": topicData,
"phoneNumbers": endpointList
}
};
// Similarly use dynamodb .promise functions here
}
} catch (Err) {
console.log(Err);
}
};
aws-sdk by default supports callback style. To convert them to promise you need to add .promise() at end.
At the moment this example is using for loop but you could do the same thing using Promise.all as well.
Hope this helps.
I'm currently using a node.js application to scan Twitter's API based on a set of parameters, and then uploading those JSON objects to a MongoDB database kept on MLab. I have connected to the database without issue, but my code will only upload ONE tweet before crashing. Here is the error message:
(node:62948) UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 2): BulkWriteError: E11000 duplicate key error index: test-database.test-collection.$_id_ dup key: { : ObjectId('5aecb49e205197f5e4f52e32') }
It seems to have something to do with the keys that I am using in the database? How can I write my code so that I don't have this issue. Here is my program right now:
// Question code: streams tweets and inserts each into MongoDB; crashes with an
// E11000 duplicate-key error after the first insert.
var Twitter = require("twitter");
var config = require("./config");
const mongoose = require("mongoose");
const MongoClient = require("mongodb");
var twitterClient = new Twitter(config);
const assert = require("assert");
const dbName = "test-database";
const collectionName = "test-collection";
// NOTE(review): the '#' before the host looks like a garbled '@' separator.
const url = "mongodb://user:password#ds113870.mlab.com:13870/test-database";
const param = {follow: '21111098,958191744683782144,18061669,21111098,18061669,2891210047,1869975300,19394188,4107251,16056306,259459455,21111098,18061669,2891210047,1869975300,19394188,4107251,16056306,259459455,968650362,343041182,5558312,111671288,476256944,378631423,803694179079458816,30354991,224285242,45645232,235217558,20879626,150078976,278124059,102477372,249787913,381577682,15324851,435500714,823302838524739584,20597460,555355209,15745368,229966028,3001665106,2863210809,1397501864,78403308,253252536,47747074,1262099252,1284467173,92186819,169198625,600463589,413160266,1096059529,1095504170,1058520120,328679423,247334603,308794407,216503958,234128524,59969802,10615232,118740781,1383059977,2856787757,75364211,586730005,18632666,18632809,1249982359,339822881,365530059,216881337,3229124078,55677432,816683274076614656,26594419,1068481578,1068540380,19726613,13529632,18137749,3067974778,109071031,278094476,21406834,1129029661,970207298,357606935,236511574,145292853,76456274,456137574,33537967,941000686275387392,555474658,264219447,11650762,16160352,57065141,753693622692970497,21269970,238177562,389554914,11651202,214767677,515822213,16473577,1071402577,323490669,1480852568,2962923040,2987970190,811313565760163844,3145735852,266133081,41363507,109287731,14125897,946549322,361569788,15808765,1603426344,18695134,407039290,1099199839,183062944,60828944,325231436,14140370,17494010,1872999342,72198806,709389393811927041,21157904,213339899,2964174789,22195441,1061029050,460376288,382791093,106733567,43910797,24768753,18915145,240790556,2612307559,7270292,20546536,225921757,27044466,250188760,292495654,122124607,29201047,223166587,171598736,94154021,221162525,26062385,486694111,242555999,770121222,14845376,432895323,3219708271,217543151,81191343,2955485182,978029858,296361085,26533227,76649729,21669223,283130017,73303753,13218102,1648117711,1074480192,23022687,262756641,18170310,88784440,242836537,946946130,172858784,7429102,409719505,293131808,158470209,117501995,35567
751,193794406,158890005,234374703,113355380,1074518754,87510313,233737858,291756142,1848942470,202206694,499268312'};
// One shared object reused for every tweet: after the first insert the driver
// adds an _id to it, so every later insert reuses that same _id — the source
// of the E11000 duplicate-key error described in the question.
let newTweet = {
name: "",
text: "",
followers: ""
}
MongoClient.connect(url, function(err, client){
assert.equal(null,err);
console.log("connected.");
const db = client.db(dbName);
const collection = db.collection(collectionName);
// NOTE(review): declared with (db, callback) but called with (newTweet, cb)
// below, and the callback is never invoked by this body.
const insertDocument = function(db, callback){
// THIS IS WHERE I THINK THE PROBLEM IS //
collection.insert(newTweet)
}
twitterClient.stream('statuses/filter',param,function(stream) {
stream.on('data', function(tweet) {
newTweet.name = tweet.user.screen_name;
newTweet.followers = tweet.user.followers_count;
newTweet.text = (tweet.extended_tweet) ? tweet.extended_tweet.text : tweet.text;
// Intends to close the connection right after the first insert — later
// inserts then race against a closing client (see the answer below).
insertDocument(newTweet, function(){
db.close();
});
});
});
});
You are closing the database immediately after inserting one tweet.
// Excerpt from the question illustrating the bug: the connection is closed
// inside the per-tweet 'data' handler, i.e. right after the first insert.
// stream.on('data', function(tweet) {
insertDocument(newTweet, function(){
db.close();
});
Instead, close the connection on stream end.
// Close the MongoDB connection only once the tweet stream has ended.
stream.on('end', () => {
  db.close();
});
You are getting the Duplicate key issue, because you declared the newTweet object globally, which shares the same object for every tweet. Declare the tweet object inside the stream.on('data') handler function. i.e.
// Fix for the duplicate-key error: build a brand-new document for each tweet
// so the driver assigns a fresh _id on every insert. (Snippet truncated in
// the original post.)
stream.on('data', function(tweet) {
let newTweet = {};
newTweet.name = tweet.user.screen_name;
newTweet.followers = tweet.user.followers_count;
I've read through the Firebase Cloud Functions reference, guides and sample code to try and determine why my function is triggered twice, but am yet to find a successful resolution. I've also trialed Firebase-Queue as a work-around, however its latest update suggests Cloud Functions is the way to go.
In short, I'm retrieving notices from an external API using request-promise, checking those notices against ones I already have in my database, and when a new notice is identified, posting it to said database. The corresponding venue is then updated with a reference to the new notice. Code is as follows:
'use strict';
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const request = require('request');
const rp = require('request-promise');
admin.initializeApp(functions.config().firebase);
const db = admin.database();
const venues = db.ref("/venues/");
exports.getNotices = functions.https.onRequest((req, res) => {
var options = {
uri: 'https://xxxxx.xxxxx',
qs: {
format: 'json',
type: 'venue',
...
},
json: true
};
rp(options).then(data => {
processNotices(data);
console.log(`venues received: ${data.length}`);
res.status(200).send('OK');
})
.catch(error => {
console.log(`Caught Error: ${error}`);
res.status(`${error.statusCode}`).send(`Error: ${error.statusCode}`);
});
});
// Compares fetched notices against the keys already stored under /venues/ and
// posts only those not present yet. Returns a promise that resolves once
// every new notice has been written, so callers can await completion.
function processNotices(data) {
  return venues.once("value").then(snapshot => {
    // Collect ALL existing keys first. The original compared each notice
    // against every child individually, so a notice was (re)posted once per
    // non-matching child — the duplicate posting the question describes.
    const existingKeys = new Set();
    snapshot.forEach(childSnapshot => {
      existingKeys.add(childSnapshot.val().key);
    });

    const pending = [];
    for (const notice of data) {
      if (!existingKeys.has(notice.key)) {
        console.log(`New notice identified: ${notice.key}`);
        pending.push(postNotice(notice));
      }
    }
    return Promise.all(pending);
  });
}
// Stores a new notice under a freshly-pushed key, then records that key on the
// owning venue. Returns the whole chain so callers can await completion.
function postNotice(notice) {
  const ref = venues.push();
  const key = ref.key;
  const loc = notice.location;
  return ref.set(notice).then(() => {
    console.log('notice posted...');
    // Return the update so its promise isn't dropped (the original left it
    // floating, letting the caller resolve before the venue was updated).
    return updateVenue(key, loc);
  });
}
// Flags notice `key` as "true" under the given venue's notices node and
// returns the update promise.
function updateVenue(key, location) {
  const venueNoticesRef = db.ref("/venues/" + location + "/notices/");
  return venueNoticesRef.update({ [key]: "true" }).then(() => {
    console.log(`${location} successfully updated with ${key}`);
  });
}
Any suggestions as to how to rectify the double-triggering would be greatly appreciated. Thanks in advance!
Problem solved - some misinformation from the Firebase Console Logs (repeating entries), coupled with nested for loops in the wrong order were responsible for the apparent double triggering.