Nested for loop in nodejs seems to be running asynchronously - javascript

So I have two for loops, one nested inside the other, but from the results it looks like the first loop runs through and returns all of its results before the nested loop runs. How can I make it behave synchronously?
For example, all of the topicData values get printed in a row instead of printing one topicData and then moving on to the nested for loop.
I'm not sure if this is the proper way to implement async/await. Any pointers would be appreciated. Thanks
exports.create = (event, context, callback) => {
  var topicTimestamp = "";
  var endpoint = "";
  sns.listTopics(async function(err, data) {
    if (err) {
      console.log(err, err.stack);
    } else {
      console.log(data);
      for (var topic in data.Topics) { //first loop
        //var topicData = "";
        //retrieve each topic and append to topicList if it is lakeview topic
        var topicData = await data.Topics[topic].TopicArn;
        topicTimestamp = topicData.slice(22, 34); //get only the topic createdAt
        var params = {
          TopicArn: topicData //topicData
        };
        console.log("SUBS per" + params.TopicArn);
        //retrieve subscriptions attached to each topic
        sns.listSubscriptionsByTopic(params, async function(err, subscriptionData) {
          console.log(subscriptionData);
          //console.log("SUBS per" + params.TopicArn);
          if (err) {
            console.log(err, err.stack); // an error occurred
          } else {
            var endpointList = [];
            for (var sub in subscriptionData.Subscriptions) { //nested loop
              endpoint = await subscriptionData.Subscriptions[sub].Endpoint;
              console.log("ENDPOINT:: " + endpoint);
              endpointList.push(endpoint);
            }
          } // end of else listSub
          //put topic info into table
          var topicsParams = {
            TableName: tableName,
            Item: {
              id: uuidv4(),
              createdAt: timestamp,
              topicCreatedAt: topicTimestamp,
              topic: topicData,
              phoneNumbers: endpointList
            },
          };
          endpointList = []; //reset to empty array
          dynamoDb.put(topicsParams, (error) => {...}

There are a couple of issues here:
- You are writing callback-style code in loops even though promise-based methods are available.
- You could also do things in parallel using Promise.all.
- Because of the callback style, the code is very complicated.
- You are awaiting where it is not required, for example inside the callbacks.
You can try it this way:
exports.create = async (event, context, callback) => {
  try {
    let topicTimestamp = "";
    let endpoint = "";
    const data = await sns.listTopics().promise();
    // eslint-disable-next-line guard-for-in
    for (const topic in data.Topics) { // first loop
      // var topicData = "";
      // retrieve each topic and append to topicList if it is lakeview topic
      const topicData = data.Topics[topic].TopicArn;
      topicTimestamp = topicData.slice(22, 34); // get only the topic createdAt
      const params = {
        "TopicArn": topicData // topicData
      };
      console.log(`SUBS per${params.TopicArn}`);
      const subscriptionData = await sns.listSubscriptionsByTopic(params).promise();
      const endpointList = [];
      // eslint-disable-next-line guard-for-in
      for (const sub in subscriptionData.Subscriptions) { // nested loop
        endpoint = subscriptionData.Subscriptions[sub].Endpoint;
        console.log(`ENDPOINT:: ${endpoint}`);
        endpointList.push(endpoint);
      }
      // put topic info into table
      const topicsParams = {
        "TableName": tableName,
        "Item": {
          "id": uuidv4(),
          "createdAt": timestamp,
          "topicCreatedAt": topicTimestamp,
          "topic": topicData,
          "phoneNumbers": endpointList
        }
      };
      // Similarly use dynamodb .promise functions here
    }
  } catch (Err) {
    console.log(Err);
  }
};
aws-sdk supports callback style by default. To convert a call to a promise you add .promise() at the end.
At the moment this example uses for loops, but you could do the same thing with Promise.all to run the per-topic lookups in parallel.
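For reference, a rough sketch of that Promise.all variant, built from the same SDK calls as above; the DynamoDB write and the tableName/timestamp variables are assumed to exist as in the original and are left as a comment:

const data = await sns.listTopics().promise();

// Kick off the per-topic work for every topic at once and wait for all of it.
await Promise.all(data.Topics.map(async ({ TopicArn }) => {
  const topicTimestamp = TopicArn.slice(22, 34); // topic createdAt, as above
  const subscriptionData = await sns
    .listSubscriptionsByTopic({ TopicArn })
    .promise();
  const endpointList = subscriptionData.Subscriptions.map((sub) => sub.Endpoint);
  // Build topicsParams here and call: await dynamoDb.put(topicsParams).promise();
}));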
Hope this helps.

Related

Where can I put an async function so I can use await to send a discord response?

I am pulling data from a Google Sheet, and after some help I have reached the point where the Discord bot responds to the inquiry with the correct data, but it responds with each row of the mapped array in a separate message rather than gathering the data and sending it in one message. So I believe I need to use an async function and await; I just can't figure out where or how to put it. I've been stuck on this for a few hours now and can't seem to get it to work.
Here is my code:
const { google } = require('googleapis');
const { sheets } = require('googleapis/build/src/apis/sheets');
const keys = require('../Data/client_secret.json');
const { Team, DiscordAPIError } = require('discord.js');
const Command = require("../Structures/Command");

const gclient = new google.auth.JWT(
  keys.client_email,
  null,
  keys.private_key,
  ['https://www.googleapis.com/auth/spreadsheets']
);

module.exports = new Command({
  name: "freeagents",
  description: "Potential free agents for next season.",
  async run(message, args, client) {
    gclient.authorize(function(err, tokens) {
      if (err) {
        console.log(err);
        return;
      } else {
        console.log("Connected!");
        gsrun(gclient);
      }
    });
    async function gsrun(cl) {
      const gsapi = google.sheets({ version: 'v4', auth: cl });
      gsapi.spreadsheets.values.get({
        spreadsheetId: "11e5nFk50pDztDLngwTSmossJaNXNAGOaLqaGDEwrbQM",
        range: 'Keepers!C1:F',
      }, (err, res) => {
        if (err) return console.log('The API returned an error: ' + err);
        const rows = res.data.values;
        if (rows.length) {
          const cells = rows.filter(cell => cell[3]);
          cells.map((cell) => {
            console.log(`${cell[0]}, ${cell[1]}`);
            console.log(`${cells}`);
            return message.reply(`${cell[0]}, ${cell[1]}`);
          });
        } else {
          console.log('No data found.');
        }
      });
    }
  }
});
Any help would be greatly appreciated!
You're receiving multiple messages because you're sending the reply inside cells.map((cell) => {}). That method calls the function on each member of the array. You should instead iterate over the cells array to build a single string that you can then send as one message, for example:
const strings = [];
for (let cell of cells) {
  strings.push(`${cell[0]}, ${cell[1]}`);
}
return message.reply(strings.join("\n"));
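As for where the async/await could go: the googleapis client returns a promise when no callback is passed, so gsrun (still defined inside run so that message is in scope) could be rewritten roughly like this; the sheet ID and range are taken from the post, the rest is a sketch:

async function gsrun(cl) {
  const gsapi = google.sheets({ version: 'v4', auth: cl });
  // With no callback, values.get returns a promise that can be awaited.
  const res = await gsapi.spreadsheets.values.get({
    spreadsheetId: "11e5nFk50pDztDLngwTSmossJaNXNAGOaLqaGDEwrbQM",
    range: 'Keepers!C1:F',
  });
  const rows = res.data.values || [];
  const cells = rows.filter((cell) => cell[3]);
  const strings = cells.map((cell) => `${cell[0]}, ${cell[1]}`);
  return message.reply(strings.join("\n"));
}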

Trouble with asynchronous code and mongodb

This code searches for a company, then for all of the websites listed in an array on that company, then for all the conversations on each website, then for all the messages in each conversation, and finally sends those arrays of message ObjectIDs to a helper function which returns an array of JSON data for each message. Phew, that was a mouthful.
I need to somehow wait for all of this to complete before cleaning it up a bit and res.send'ing it. All of the code works, and console.log(messagesWithData) prints a few arrays of messages (since several are sent in this scenario).
All help is appreciated :)
Company.findOne({ 'roles.admins': userId }, function (err, doc) {
  if (!err) {
    for (const item of doc.websites) {
      Website.findById(item, function (err, doc) {
        for (const item of doc.conversations) {
          Conversation.findById(item, function (err, doc) {
            async function findMessageData() {
              var messagesWithData = await helper.takeMessageArray(
                doc.messages
              );
              await sendMessages(messagesWithData);
            }
            findMessageData();
            async function sendMessages(messagesWithData) {
              // not sure what to put here!
              console.log(messagesWithData)
            }
          });
        }
      });
    }
  } else {
    res.send(err);
  }
});
The code above can be simplified a bit with async/await:
const company = await Company.findOne({ 'roles.admins': userId });
let allMessages = [];
for (const websiteId of company.websites) {
  const website = await Website.findById(websiteId);
  for (const conversationId of website.conversations) {
    const conversation = await Conversation.findById(conversationId);
    const messagesWithData = await helper.takeMessageArray(
      conversation.messages
    );
    allMessages = [...allMessages, ...messagesWithData];
  }
}
// Everything completed, messages stored in one place...
console.log(allMessages);
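If the sequential awaits turn out to be slow, the per-website lookups can also run in parallel. A rough sketch with Promise.all, assuming this runs inside an async handler and that Array.prototype.flat is available (Node 11+):

const company = await Company.findOne({ 'roles.admins': userId });

// Fetch every website (and, per website, every conversation) concurrently.
const perWebsite = await Promise.all(
  company.websites.map(async (websiteId) => {
    const website = await Website.findById(websiteId);
    const perConversation = await Promise.all(
      website.conversations.map(async (conversationId) => {
        const conversation = await Conversation.findById(conversationId);
        return helper.takeMessageArray(conversation.messages);
      })
    );
    return perConversation.flat();
  })
);

const allMessages = perWebsite.flat();
console.log(allMessages);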

Retrieving documents for MongoDB cluster

I am trying to retrieve all the documents from a MongoDB cluster. I have followed code I've seen online, however I am facing a small problem.
const MongoClient = require('mongodb');
const uri = "mongodb+srv://<user>:<password>@cluster0-10soy.mongodb.net/test?retryWrites=true&w=majority";
var questionsArray = [];

MongoClient.connect(uri, function (err, client) {
  const database = client.db("WhatSportWereYouMadeFor");
  database.collection("Questions").find({}, (error, cursor) => {
    cursor.each(function (error, item) {
      if (item == null) {
        console.log(error);
      }
      questionsArray.push(item);
    });
  });
});

module.exports = { questionsArray };
I connect fine to the database; however, I've set a breakpoint at the stop variable and it gets hit before any of the documents retrieved from the database are pushed to the questions array.
I've also tried wrapping the code inside an async function and awaiting it before the stop variable, but the breakpoint still gets hit first, and only afterwards do the documents get pushed to the array.
What I would do is wrap the whole thing in a promise, and then export that.
const MyExport = () => {
  return new Promise((resolve, reject) => {
    var questionsArray = [];
    MongoClient.connect(uri, function (err, client) {
      const database = client.db("WhatSportWereYouMadeFor");
      database.collection("Questions").find({}, (error, cursor) => {
        cursor.each(function (error, item) {
          if (error) {
            return reject(error);
          }
          if (item == null) {
            // Cursor exhausted: every document has been pushed, so resolve now.
            return resolve(questionsArray);
          }
          questionsArray.push(item);
        });
      });
    });
  });
};

module.exports.questionsArray = MyExport;
But then when you import it, you need to call it and await it:
const questionsArrayFunc = require("path/to/this/file").questionsArray;
const questionsArray = await questionsArrayFunc();
I hope this is what you are looking for. There might be some other way, but I think this works.
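Alternatively, recent versions of the driver return promises from connect and toArray when no callback is passed, so the whole thing can be written as an async function; a minimal sketch (getQuestions is just an illustrative name):

async function getQuestions() {
  const client = await MongoClient.connect(uri);
  const database = client.db("WhatSportWereYouMadeFor");
  // toArray() with no callback resolves to the full array of documents.
  return database.collection("Questions").find({}).toArray();
}

module.exports.getQuestions = getQuestions;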

MongoDB&JavaScript heap out of memory

The data in the telemetry table is HUGE, so I get a "JavaScript heap out of memory" error.
How do I overcome that error?
const aloUrl = `mongodb://${userName}:${pwd}@${host}:${port}/${dbName}`;

MongoClient.connect(aloUrl, function(err, client) {
  if (err) {
    return console.log('ERROR:: ', err);
  }
  console.log("INFO:: OK");
  const db = client.db(dbName);
  var arr = db.collection('endpoint').find({provider: "KMR"}).map(e => e._id).toArray((err, result) => {
    if (err) {
      console.log("ERROR", err);
    }
    var son = db.collection('telemetry').find({endpoint: {$in: result}}).toArray().then(arr => {
      console.log("Let's start to party");
      for (let i = 0; i < 10; i++) {
        console.log("\t" + arr[i]._id);
      }
    }).catch(e => {
      console.log(`ERROR::${e}`);
    });
  });
});
From the mongodb docs,
The toArray() method returns an array that contains all the documents
from a cursor. The method iterates completely the cursor, loading all
the documents into RAM and exhausting the cursor.
Thus, instead of calling toArray, you should use next or forEach (or some other method that doesn't load everything into RAM at once) to iterate through the elements one by one.
For example, to print all the documents in your telemetry collection ONE BY ONE, you can do this:
db.collection('telemetry')
  .find({
    endpoint: {
      $in: result
    }
  })
  .forEach((document) => {
    console.log(document);
  });
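If you prefer the next-based iteration mentioned above, here is a rough async/await sketch; it assumes the surrounding function is async and the driver version returns promises from hasNext/next when no callback is passed:

const cursor = db.collection('telemetry').find({ endpoint: { $in: result } });
// Pull documents one at a time instead of buffering them all in RAM.
while (await cursor.hasNext()) {
  const document = await cursor.next();
  console.log("\t" + document._id);
}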
I would suggest using forEach instead of toArray in order to fetch and process the documents without exhausting memory.
For huge data sets it's always advisable to stream (in Mongo this is achieved with a cursor).
Also:
$lookup is new in MongoDB 3.2. It performs a left outer join to an unsharded collection in the same database to filter in documents from the "joined" collection for processing.
You can have a look at the aggregation pipeline for Mongo.
Updating your code to use aggregate:
var MongoClient = require('mongodb').MongoClient;

// Connection URL
const aloUrl = `mongodb://${userName}:${pwd}@${host}:${port}/${dbName}`;

MongoClient.connect(aloUrl, function (err, client) {
  console.log("INFO:: OK");
  const db = client.db(dbName);
  const col = db.collection('endpoint');
  var cursor = col.aggregate([
    {
      $match: {provider: "KMR"}
    },
    {
      $lookup: {
        from: "telemetry",
        localField: "_id",
        foreignField: "endpoint",
        as: "telemetry"
      }
    }
  ]);
  console.log("Let's start to party");
  cursor.on('data', function (data) {
    console.log("\t" + data._id);
  });
  cursor.on('end', function () {
    console.log("Done ");
  });
});

NodeJs Mongoose collecting data in loop

Hoping for your help.
I have a collection of tasks with documents following this schema:
Task = {
  title: 'taskName',
  performers: [ {userId: 1, price: 230}, {userId: 2, price: 260} ]
}

Profiles = { id: 1, name: 'Alex', surname: 'Robinson', etc.. }
Finally, I should collect all the data and return an array of profile objects in the response. The problem is that the for loop ends before all of the .findOne() calls have finished, so it returns an empty array.
This is the code for the GET handler:
apiRoutes.get('/performers/:id', function(req, res, next) {
  var profArr = [];
  Task.findOne({'_id': req.params.id}, function(err, doc) {
    for (var i = 0; i < doc.performers.length; i++) {
      var profile = {
        price: 0,
        name: '',
        surname: ''
      };
      profile.price = doc.performers[i].price;
      Profile.findOne({'_id': doc.performers[i].userId}, function(err, doc) {
        if (err) throw err;
        profile.name = doc.name;
        profile.surname = doc.surname;
        profArr.push(profile);
      });
    }
    return res.json({
      success: true,
      message: 'Performers data collected',
      data: profArr
    });
  });
});
The problem is that you need to return the response inside the Mongoose query callback. You can't use values assigned inside the callback outside of it, because the callback runs asynchronously. For example:
var sampleArr = [];
Users.find({}, function(err, users) {
  users.forEach(function(user) {
    Students.find({'userid': user.id}, function(err, student) {
      sampleArr.push({
        'student': student
      });
    });
    console.log(sampleArr);
    // It will only return an empty array [];
  });
});
So, your task should be like this:
apiRoutes.get('/performers/:id', function(req, res, next) {
  var profArr = [];
  // Get a task by ID
  Task.findById(req.params.id, function (err, task) {
    // Get all profiles
    Profile.find({}, function (err, profiles) {
      task.performers.forEach(function(taskPerformer) {
        profiles.forEach(function(profile) {
          // Check whether the performer ID matches the user ID
          if (profile._id == taskPerformer.userId) {
            profArr.push({
              price: taskPerformer.price,
              name: profile.name,
              surname: profile.surname
            });
          }
        });
      });
      return res.json({
        success: true,
        message: 'Performers data collected',
        data: profArr
      });
    });
  });
});
A simple idea would be to introduce a countdown counter before you start your for loop, like this:
var countdown = doc.performers.length;
Decrement the countdown in the callback of each findOne call, check whether you have reached 0, and then call a function outside to send the result.
Still, your code doesn't look very efficient; there are a lot of calls to the db. Maybe you could rethink your data model in order to minimize them.
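A minimal sketch of that countdown pattern, using the names from the post (forEach avoids the usual var-in-loop closure pitfall):

Task.findOne({'_id': req.params.id}, function(err, doc) {
  if (err) throw err;
  var profArr = [];
  var countdown = doc.performers.length;

  doc.performers.forEach(function(performer) {
    Profile.findOne({'_id': performer.userId}, function(err, profileDoc) {
      if (err) throw err;
      profArr.push({
        price: performer.price,
        name: profileDoc.name,
        surname: profileDoc.surname
      });
      countdown--;
      if (countdown === 0) {
        // Every findOne callback has run; it is safe to respond now.
        res.json({success: true, message: 'Performers data collected', data: profArr});
      }
    });
  });
});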
Your "for" loop will be finished before findOne will be finished.
