Node.js: Call a method after another method is completed

I would like to call my "app.get('/news/news-desc', (req, res))" handler after "app.get('/news/api/:newsName', function(req, res))" has completed.
Here is my code:
let articleUrlArray = [];

app.get('/news/api/:newsName', function(req, res) {
  const API_KEY = 'example';
  let data = '';
  const techCrunchURL = `https://newsapi.org/v2/top-headlines?sources=techcrunch&apiKey=${API_KEY}`
  switch(req.params.newsName) {
    case 'tech-crunch':
      request(techCrunchURL, function(err, response, html) {
        let formattedData = JSON.parse(response.body);
        for(let i = 0; i < formattedData.articles.length; i++) {
          articleUrlArray.push(formattedData.articles[i].url);
        }
        data = response.body;
        res.setHeader('Content-Type', 'application/json');
        res.send(data);
      });
      break;
    default:
      data = 'Please type in correct news source';
      break;
  }
})

const checkBody = res => (err, response, html) => {
  const $ = cheerio.load(html);
  const articleContent = $('.article-content').children('p')
  const bodyOne = articleContent.eq(0).text()
  const bodyTwo = articleContent.eq(1).text()
  const isExtensive = bodyOne.split(' ').length > 50
  res(isExtensive ? { bodyOne } : { bodyOne, bodyTwo })
}

const getArticle = article => new Promise(res => request(article, checkBody(res)))

app.get('/news/news-desc', (req, res) => {
  Promise.all(articleUrlArray.map(getArticle)).then(data => res.send(JSON.stringify(data)))
})
As you can see, the first method calls newsapi.org and gets 10 articles. It then extracts only the URLs of those articles and pushes them into articleUrlArray.
After the URLs have been pushed into articleUrlArray, it looks like this:
let articleUrlArray = [ 'https://techcrunch.com/2018/05/19/shared-housing-startups-are-taking-off/',
'https://techcrunch.com/2018/05/19/shared-housing-startups-are-taking-off/',
'https://techcrunch.com/2018/05/19/my-data-request-lists-guides-to-get-data-about-you/',
'https://techcrunch.com/2018/05/19/siempos-new-app-will-break-your-smartphone-addiction/',
'https://techcrunch.com/2018/05/19/la-belle-vie-wants-to-compete-with-amazon-prime-now-in-paris/',
'https://techcrunch.com/2018/05/19/apple-started-paying-15-billion-european-tax-fine/',
'https://techcrunch.com/2018/05/19/original-content-dear-white-people/',
'https://techcrunch.com/2018/05/19/meet-the-judges-for-the-tc-startup-battlefield-europe-at-vivatech/',
'https://techcrunch.com/2018/05/18/nasas-newest-planet-hunting-satellite-takes-a-stellar-first-test-image/',
'https://techcrunch.com/video-article/turning-your-toys-into-robots-with-circuit-cubes/',
'https://techcrunch.com/2018/05/18/does-googles-duplex-violate-two-party-consent-laws/' ];
It is simply filled up with URLs.
The second method would then use the filled articleUrlArray to do its own thing.
However, with my current code, the second method runs before articleUrlArray has been filled up.
I would like to run the second method only after the first method completes and articleUrlArray has been filled with URLs.
Could you please help me with this?

let articleUrlArray = [];

const addArticleUrl = url => articleUrlArray.push(url)

const checkBody = res => (err, response, html) => {
  const $ = cheerio.load(html);
  const articleContent = $('.article-content').children('p')
  const bodyOne = articleContent.eq(0).text()
  const bodyTwo = articleContent.eq(1).text()
  const isExtensive = bodyOne.split(' ').length > 50
  res(isExtensive ? { bodyOne } : { bodyOne, bodyTwo })
}

const getArticle = article => new Promise(res => request(article, checkBody(res)))

const newsDescMiddleware = (req, res) => {
  Promise.all(articleUrlArray.map(getArticle)).then(data => res.send(JSON.stringify(data)))
}
app.get('/news/news-desc', newsDescMiddleware)

const techCrunch = res => url => request(url, (err, response, html) => {
  let formattedData = JSON.parse(response.body);
  formattedData.articles.forEach(article => addArticleUrl(article.url))
  res(response.body)
})

const getNewsByName = (newsName, url) => new Promise((res, reject) => {
  const sources = {
    'tech-crunch': () => techCrunch(res)(url)
  };
  (sources[newsName] || reject)()
})

const getNewsByNameMiddleware = (req, res) => {
  const API_KEY = 'example';
  const techCrunchURL = `https://newsapi.org/v2/top-headlines?sources=techcrunch&apiKey=${API_KEY}`
  getNewsByName(req.params.newsName, techCrunchURL)
    .then(body => {
      res.setHeader('Content-Type', 'application/json');
      res.send(body)
    })
    .catch(() => res.send('Please type in correct news source'))
}

app.get('/news/api/:newsName', getNewsByNameMiddleware, newsDescMiddleware)
Here, I made you some middlewares.
I am assuming that you don't need the response of the previous middleware.
I like to split the code by its responsibilities and write it functionally.
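One caveat about the app.get('/news/api/:newsName', getNewsByNameMiddleware, newsDescMiddleware) chain: Express only moves on to newsDescMiddleware if getNewsByNameMiddleware hands control over with next(), and since it already answers the request with res.send, the second handler never runs for that request. If you really want both to run in a single chain, the first middleware would need to look roughly like this (sketch only):
const getNewsByNameMiddleware = (req, res, next) => {
  // fetch the articles and fill articleUrlArray as above, but do not send yet
  next() // hand over to newsDescMiddleware, which sends the final response
}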

You can separate the core logic of the first route into a function and re-use it in both places if you like; however, you will still need to provide the newsName parameter to the GET '/news/news-desc' endpoint.
Here is an example based on your code.
let articleUrlArray = [];

function getNewsNames(newsName, callback) {
  const API_KEY = 'example';
  let data = '';
  const techCrunchURL = `https://newsapi.org/v2/top-headlines?sources=techcrunch&apiKey=${API_KEY}`
  switch (newsName) {
    case 'tech-crunch':
      request(techCrunchURL, function (err, response, html) {
        let formattedData = JSON.parse(response.body);
        for (let i = 0; i < formattedData.articles.length; i++) {
          articleUrlArray.push(formattedData.articles[i].url);
        }
        data = response.body;
        callback(null, data);
      });
      break;
    default:
      data = 'Please type in correct news source';
      callback('Error', data);
      break;
  }
}

app.get('/news/api/:newsName', function (req, res) {
  getNewsNames(req.params.newsName, (err, data) => {
    if (!err) {
      res.setHeader('Content-Type', 'application/json');
    }
    return res.send(data);
  })
})

const checkBody = res => (err, response, html) => {
  const $ = cheerio.load(html);
  const articleContent = $('.article-content').children('p')
  const bodyOne = articleContent.eq(0).text()
  const bodyTwo = articleContent.eq(1).text()
  const isExtensive = bodyOne.split(' ').length > 50
  res(isExtensive ? { bodyOne } : { bodyOne, bodyTwo })
}

const getArticle = article => new Promise(res => request(article, checkBody(res)))

app.get('/news/news-desc/:newsName', (req, res) => {
  getNewsNames(req.params.newsName, (err, data) => {
    // by now, the articleUrlArray array will be filled
    Promise.all(articleUrlArray.map(getArticle)).then(data => res.send(JSON.stringify(data)))
  })
})
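Another option, if you would rather not depend on articleUrlArray being mutated as a side effect, is to have the fetch helper resolve with the URLs it found and chain from there. A rough sketch reusing request and getArticle from above (getNewsUrls is a hypothetical helper name, not part of the original code):
const getNewsUrls = newsName => new Promise((resolve, reject) => {
  if (newsName !== 'tech-crunch') return reject(new Error('Please type in correct news source'));
  const API_KEY = 'example';
  const techCrunchURL = `https://newsapi.org/v2/top-headlines?sources=techcrunch&apiKey=${API_KEY}`;
  request(techCrunchURL, (err, response) => {
    if (err) return reject(err);
    resolve(JSON.parse(response.body).articles.map(article => article.url));
  });
});

app.get('/news/news-desc/:newsName', (req, res) => {
  getNewsUrls(req.params.newsName)
    .then(urls => Promise.all(urls.map(getArticle)))
    .then(data => res.json(data))
    .catch(err => res.status(400).send(err.message));
});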

Related

Convert form data to JSON file with NodeJS

I have a project in Node.js in which I have a form to add new users.
How can I view this information in JSON format?
This is the data that I see:
name age country city
------------------------------
user1 22 Spain Madrid button{View JSON}
When I press the 'View JSON' button, the following must be displayed below the table:
[
  {
    "id": 1,
    "name": "user1",
    "age": 22,
    "country": "Spain",
    "city": "Madrid"
  }
]
My problem: how can I create a function that performs this conversion? How do I call the function from index.ejs?
I cleaned up and merged the code, and I created a new /export endpoint to export the data as a CSV file. I couldn't test it, so let me know if it doesn't work.
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const MongoClient = require('mongodb').MongoClient;

app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.use(express.static('public'));
app.set('views', './src/views');

app.get('/', async (req, res) => {
  const db = await mongoDB();
  const person = await db.collection('person').find().toArray();
  res.render('index.ejs', { person: person })
})

app.get('/export', async (req, res) => {
  await convertCSV();
  res.status(200).send({ success: 1 });
})

app.post('/person', async (req, res) => {
  res.redirect('/');
})

app.listen(process.env.PORT, function () {
  console.log(`server: http://${process.env.HOST}:${process.env.PORT}`);
})

const mongoDB = () => {
  return new Promise((resolve, reject) => {
    const url = 'mongodb://127.0.0.1:27017';
    MongoClient.connect(url, { useUnifiedTopology: true })
      .then(client => {
        const db = client.db('users')
        resolve(db);
      })
      .catch(error => reject(error))
  });
}

// convertCSV is an async function so that `await convertToCSV()` below is valid
const convertCSV = async () => {
  const converter = require("json-2-csv");
  const fetch = require("node-fetch");
  const fs = require("fs");
  const flatten = require('flat');
  const maxRecords = 10;

  const getJson = async () => {
    const response = await fetch(`http://${process.env.HOST}:${process.env.PORT}/users.json`);
    const responseJson = await response.json();
    return responseJson;
  };

  const convertToCSV = async () => {
    const json = await getJson();
    let keys = Object.keys(flatten(json[0]));
    let options = {
      keys: keys
    };
    converter.json2csv(json, json2csvCallback, options);
  };

  let json2csvCallback = function (err, csv) {
    if (err) throw err;
    const headers = csv.split('\n').slice(0, 1);
    const records = csv.split('\n').slice(0,);
    for (let i = 1; i < records.length; i = i + maxRecords) {
      let dataOut = headers.concat(records.slice(i, i + 3)).join('\n');
      let id = Math.floor(i / maxRecords) + 1;
      fs.writeFileSync('data' + id + '.csv', dataOut)
    }
  };

  await convertToCSV();
}
However, it is not good practice to keep the controller, index and routes all in the same file. A better approach is to create routes and controllers folders and organize the code in a more orderly way.
Something like this (you can find better layouts, of course; mine is just advice, and a minimal sketch follows the list):
- index.js
- router.js (A router to manage your endpoints)
- controllers (Controller when you call the endpoint)
-> export.controller.js
-> person.controller.js
- routes (Endpoints)
-> export.route.js
-> person.route.js
- helpers
-> databaseHandler.js (Database connection handler)
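For instance, a minimal sketch of that split (the file names are only illustrative, and it assumes the mongoDB() helper above is moved into helpers/databaseHandler.js):
// routes/person.route.js -- wires the endpoint to its controller
const router = require('express').Router();
const { listPersons } = require('../controllers/person.controller');
router.get('/', listPersons);
module.exports = router;

// controllers/person.controller.js -- the request-handling logic
const { mongoDB } = require('../helpers/databaseHandler');
exports.listPersons = async (req, res) => {
  const db = await mongoDB();
  const person = await db.collection('person').find().toArray();
  res.render('index.ejs', { person });
};

// router.js -- mounts every route file in one place
const express = require('express');
const mainRouter = express.Router();
mainRouter.use('/person', require('./routes/person.route'));
module.exports = mainRouter;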

Node - wait for map to finish before continuing

I have a file in my Node app that is supposed to fetch some data about every League champion from their official website using cheerio, and it's going great, except that when I add all the data to my array to then return it as JSON data, the write function runs before the map finishes, so I just end up creating a JSON file with an empty array in it:
const request = require('request');
const cheerio = require('cheerio');
const fs = require('fs');

const champions = fs.readFileSync('champions.json');
const championsObj = JSON.parse(champions);
let champsList = [];

championsObj.map(champ => {
  request(champ.href, (err, res, html) => {
    if (!err && res.statusCode == 200) {
      const $ = cheerio.load(html);
      const champName = $('.style__Title-sc-14gxj1e-3 span').text();
      let skins = [];
      const skinsList = $('.style__CarouselItemText-sc-1tlyqoa-16').each(
        (i, el) => {
          const skinName = $(el).text();
          skins.push = skinName;
        }
      );
      const champion = {
        champName,
        skins
      };
      console.log(champion);
      champsList.push = champion;
    }
  });
});

const jsonContent = JSON.stringify(champsList);
fs.writeFile('champions2.json', jsonContent, 'utf8', function(err) {
  if (err) {
    console.log(err);
  }
});
I'm not a Node expert, but I tried using a Promise and it didn't work; I'm not sure, maybe I used it wrong.
UPDATE #1: using axios
championsObj.map(async champ => {
  const html = await axios.get(champ.href);
  const $ = await cheerio.load(html);
  const champName = $('.style__Title-sc-14gxj1e-3 span').text();
  let skins = [];
  const skinsList = $('.style__CarouselItemText-sc-1tlyqoa-16').each(
    (i, el) => {
      const skinName = $(el).text();
      skins.push = skinName;
    }
  );
  const champion = {
    champName,
    skins
  };
  console.log(champion);
  champsList.push = champion;
});
You can use await Promise.all(<array>.map(async () => {...})); it does not require any additional dependencies. However, you have no guarantees about the order of the asynchronous iterations (the iterations all start in the right order, but there are no guarantees about when each one finishes).
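For example, applied to the axios version from the update (assuming the same axios, cheerio and fs requires as in the question, and noting that skins.push(...) is a method call, not an assignment), a sketch could look like this:
(async () => {
  const champsList = await Promise.all(championsObj.map(async champ => {
    const { data: html } = await axios.get(champ.href); // axios puts the body on .data
    const $ = cheerio.load(html);
    const champName = $('.style__Title-sc-14gxj1e-3 span').text();
    const skins = [];
    $('.style__CarouselItemText-sc-1tlyqoa-16').each((i, el) => {
      skins.push($(el).text());
    });
    return { champName, skins };
  }));
  // runs only after every request has finished
  await fs.promises.writeFile('champions2.json', JSON.stringify(champsList));
})();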
Your problem here is that Array#map doesn't wait for asynchronous functions such as the request calls to finish before moving on. I recommend p-map with got. To ensure perfect execution order, I also recommend reading and writing the file asynchronously.
const got = require('got');
const pMap = require('p-map');
const cheerio = require('cheerio');
const fs = require('fs').promises;

(async () => {
  const champions = JSON.parse(await fs.readFile('champions.json', 'utf8'));
  let champsList = await pMap(champions, async champ => {
    const { body } = await got(champ.href)
    const $ = cheerio.load(body);
    const champName = $('.style__Title-sc-14gxj1e-3 span').text();
    let skins = [];
    $('.style__CarouselItemText-sc-1tlyqoa-16').each(
      (_, el) => {
        const skinName = $(el).text();
        skins.push(skinName);
      }
    );
    const champion = {
      champName,
      skins
    };
    console.log(champion);
    return champion;
  })
  await fs.writeFile('champions2.json', JSON.stringify(champsList));
})();
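If the champions list is long, p-map also lets you cap how many requests are in flight at once through its concurrency option, for example:
let champsList = await pMap(champions, async champ => { /* same mapper as above */ }, { concurrency: 5 });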

Requesting different endpoints with firestore Cloud Function

I am trying to have a flexible Cloud Function that executes on different end points.
My original Cloud Function looks like this:
const functions = require('firebase-functions')
const admin = require('firebase-admin')
const _ = require('lodash')
const { getObjectValues } = require('./helper-functions.js')
admin.initializeApp()
const json2csv = require('json2csv').parse

exports.csvJsonReport = functions.https.onRequest((request, response) => {
  const db = admin.firestore()
  const userAnswers = db.collection('/surveys/CNA/submissions')
  return (
    userAnswers
      .get()
      // eslint-disable-next-line promise/always-return
      .then(querySnapshot => {
        let surveySubmissions = []
        querySnapshot.forEach(doc => {
          const userSubmission = doc.data()
          surveySubmissions.push({
            ..._.mapValues(userSubmission.answers, getObjectValues), // format answers
            ...userSubmission.anonUser,
          })
        })
        const csv = json2csv(surveySubmissions)
        response.setHeader('Content-disposition', 'attachment; filename=cna.csv')
        response.set('Content-Type', 'text/csv')
        response.status(200).send(csv)
      })
      .catch(error => {
        console.log(error)
      })
  )
})
I am trying to extend this function to work on multiple collections. In the above function I am targeting the CNA collection, so instead of db.collection('/surveys/CNA/submissions/') I would like it to be db.collection('/surveys/:surveyId/submissions/').
Below is my attempt at trying to extend my original Cloud Function:
const functions = require('firebase-functions')
const admin = require('firebase-admin')
const express = require('express')
const bodyParser = require('body-parser')
const _ = require('lodash')
const { getObjectValues } = require('./helper-functions.js')
admin.initializeApp(functions.config().firebase)
const db = admin.firestore()

const app = express()
const main = express()
main.use('/api/v1', app)
main.use(bodyParser.json())

exports.webApi = functions.https.onRequest(main)

app.get('surveys/:id', (request, response) => {
  const surveyId = request.query
  const userAnswers = db.collection(`/survey/${surveyId}/submissions`)
  return (
    userAnswers
      .get()
      // eslint-disable-next-line promise/always-return
      .then(querySnapshot => {
        let surveySubmissions = []
        querySnapshot.forEach(doc => {
          const userSubmission = doc.data()
          surveySubmissions.push({
            ..._.mapValues(userSubmission.answers, getObjectValues), // format answers
            ...userSubmission.anonUser,
          })
        })
        const csv = json2csv(surveySubmissions)
        response.setHeader('Content-disposition', 'attachment; filename=cna.csv')
        response.set('Content-Type', 'text/csv')
        response.status(200).send(csv)
      })
      .catch(error => {
        console.log(error)
      })
  )
})
When I request my endpoint: myapp.firebaseapp.com/api/v1/surveys/CNA
Cannot GET /api/v1/surveys/CNA is shown in my browser.
Could someone please point me in the right direction?
To create a GET /surveys/:id endpoint that fetches the submissions of a survey by its id, use the following code in your new Cloud Function:
app.get('/surveys/:id', (request, response) => {
  const surveyId = request.params.id
  const userAnswers = db.collection(`/survey/${surveyId}/submissions`)
Let me know if it works for you.
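Worth noting as well: the id only shows up in request.params when the route path has a leading slash and a named segment, which is also why the original 'surveys/:id' pattern (no leading slash) was never matched and the browser showed Cannot GET. A quick sketch of what Express populates for GET /api/v1/surveys/CNA with the main.use('/api/v1', app) mount from the question:
app.get('/surveys/:id', (request, response) => {
  console.log(request.params.id) // 'CNA'
  console.log(request.query)     // {} unless the URL contains ?key=value pairs
  response.send(request.params.id)
})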

How to return REST API response after utility execution is finished in expressJs

I have written a POST endpoint in Express with Node. When I make a call to the API, it runs a utility that uses setInterval(), and I want to send the API response after the utility has executed clearInterval().
How can I wait and send the response after the utility execution is finished?
Please see the code below
REST API code:
const router = express.Router();
const multer = require('multer');
const { readCSVFile } = require('../util/index');

var storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, 'uploads');
  },
  filename: (req, file, cb) => {
    cb(null, file.fieldname + '-' + Date.now() + '.xlsx');
  }
});
var upload = multer({ storage: storage });

router.post('/fileUpload', upload.single('filename'), async (req, res) => {
  readCSVFile();
  res.status(201).json({ id: 1 });
});

router.get('/', (req, res) => {
  res.sendFile(__dirname + '/index.html');
});

module.exports = router;
Utility Code
const config = require('config')
const excelToJson = require('convert-excel-to-json')
const HttpsProxyAgent = require('https-proxy-agent')
const AWS = require('aws-sdk')
const json2xls = require('json2xls')
const fs = require('fs')

const awsConfig = {
  httpOptions: {
    agent: new HttpsProxyAgent(
      config.get('aws.proxy')
    ),
  }
}
AWS.config.credentials = new AWS.SharedIniFileCredentials({
  profile: config.get('aws.profile'),
})
AWS.config.update(awsConfig)

let uuidv4 = require('uuid/v4')
let csv = [];
const lexRunTime = new AWS.LexRuntime({
  region: config.get('aws.region'),
})
let refreshId

const readCSVFile = () => {
  const csvSheet = excelToJson({
    sourceFile: './Test.xlsx',
  })
  csvSheet.Sheet1.forEach(element => {
    csv.push((element.A.slice(0, element.A.length)))
  })
  runTask()
  refreshId = setInterval(runTask, 1000)
}

let botParams = {
  botAlias: config.get('bot.alias'),
  botName: config.get('bot.name'),
  sessionAttributes: {},
}
const missedUtterancesArray = []
const matchedUtterancesArray = []
let start = 0
let end = 50
let count = 50

const runTask = () => {
  let itemsProcessed = 0
  console.log('executing...')
  const arrayChunks = csv.slice(start, end)
  arrayChunks.forEach((element) => {
    botParams.inputText = element
    botParams.userId = `${uuidv4()}`
    lexRunTime.postText(botParams, function (err, data) {
      itemsProcessed++
      if (err) console.log(err, err.stack)
      else {
        if (data.intentName === null) {
          missedUtterancesArray.push({
            Utterance: element,
          })
        }
        else {
          matchedUtterancesArray.push({
            Utterance: element,
          })
        }
      }
      if (itemsProcessed === arrayChunks.length) {
        start = csv.indexOf(csv[end])
        end = start + count
      }
      if (start === -1) {
        let xls = json2xls(missedUtterancesArray)
        fs.writeFileSync('./MissedUtterances.xlsx', xls, 'binary')
        let matchedXls = json2xls(matchedUtterancesArray)
        fs.writeFileSync('./MatchedUtterances.xlsx', matchedXls, 'binary')
        console.log('File saved successfully!! ')
        console.log('Total Matched utterances count: ', csv.length - missedUtterancesArray.length)
        console.log('Total Missed utterances count: ', missedUtterancesArray.length)
        console.log('Total Utterances count: ', csv.length)
        clearInterval(refreshId)
      }
    })
  })
}
I would have needed a bit more information to answer this, but pardon my attempt if it does not work.
The setInterval call in readCSVFile is the reason: being asynchronous, it does not stop the code from progressing past it.
lexRunTime.postText also looks asynchronous. I think you'd be better off using promises when responding to the client.
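To make that concrete, here is a minimal sketch of the promise-based approach, assuming the utility keeps its current structure (readCSVFile, runTask and refreshId are the names from the code above). The idea is that readCSVFile returns a Promise and runTask calls resolve() once the last chunk has been processed, so the route handler can await it before answering:
const readCSVFile = () => new Promise((resolve, reject) => {
  const csvSheet = excelToJson({ sourceFile: './Test.xlsx' })
  csvSheet.Sheet1.forEach(element => csv.push(element.A.slice(0, element.A.length)))
  runTask(resolve, reject)
  refreshId = setInterval(() => runTask(resolve, reject), 1000)
})

// inside runTask, accept (resolve, reject) and, in the start === -1 branch,
// call resolve() right after clearInterval(refreshId) instead of only logging

router.post('/fileUpload', upload.single('filename'), async (req, res) => {
  try {
    await readCSVFile()
    res.status(201).json({ id: 1 })
  } catch (err) {
    res.status(500).json({ error: err.message })
  }
})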

Clearance code while working with Promises

I am new to Node.js as well as async programming, so I am building a shopping project. Even though my program works just fine, I just want to clarify whether my approach is correct or not.
controllers file
exports.posteditproduct = (req, res, next) => {
  const upprodid = req.body.productid;
  const upprodtitle = req.body.title;
  const upprodprice = req.body.price;
  const upprodimg = req.body.imageurl;
  const upproddesc = req.body.description;
  Product(
    upprodid,
    upprodtitle,
    upprodimg,
    upprodprice,
    upproddesc
  );
  Product.updateproduct(
    upprodid,
    upprodtitle,
    upprodprice,
    upprodimg,
    upproddesc
  ).then(function (value) {
    console.log(value)
    fs.writeFile(p, JSON.stringify(value), err => {
      console.log(err)
    })
  })
  res.redirect('/')
}
models
static updateproduct(
  upprodid,
  upprodtitle,
  upprodprice,
  upprodimg,
  upproddesc
) {
  const p = path.join(
    path.dirname(process.mainModule.filename),
    'data',
    'products.json'
  )
  return new Promise((resolve, reject) => {
    fs.readFile(p, (err, data) => {
      const allproducts = JSON.parse(data)
      const existproductid = allproducts.findIndex(prod => prod.id === upprodid)
      const upproduct = [...allproducts]
      const spreadall = upproduct[existproductid]
      spreadall.id = upprodid
      spreadall.title = upprodtitle
      spreadall.imageurl = upprodimg
      spreadall.price = upprodprice
      spreadall.description = upproddesc
      upproduct[existproductid] = spreadall
      return resolve(upproduct)
    })
  })
}
So is it okay if, in my models file, I do all the work and then return the final value within resolve(), and then in my controllers file I just use
.then(function(value) {
  // and then write to the file here
})
Would this approach be okay if I had a very large file with n number of entries?
Or should I write the writeFile call inside the Promise in the updateproduct method too?
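For what it's worth, returning the updated array from the model and writing the file in the controller does work, but if you would rather have the model own its persistence, you can move the write into updateproduct as well. A minimal sketch using fs.promises, under the assumption that the path logic stays the same (just one possible approach, not the only correct one):
const fs = require('fs').promises;
const path = require('path');

// models: read, update and persist inside one async method
static async updateproduct(upprodid, upprodtitle, upprodprice, upprodimg, upproddesc) {
  const p = path.join(path.dirname(process.mainModule.filename), 'data', 'products.json');
  const allproducts = JSON.parse(await fs.readFile(p));
  const index = allproducts.findIndex(prod => prod.id === upprodid);
  allproducts[index] = {
    id: upprodid,
    title: upprodtitle,
    imageurl: upprodimg,
    price: upprodprice,
    description: upproddesc
  };
  await fs.writeFile(p, JSON.stringify(allproducts)); // persist before resolving
  return allproducts;
}

// controllers: redirect only after the update has been persisted
Product.updateproduct(upprodid, upprodtitle, upprodprice, upprodimg, upproddesc)
  .then(() => res.redirect('/'))
  .catch(err => console.log(err));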
