Mongoose script to seed database hangs - javascript

I have the following script:
const db = require('../db')
const User = require('../models/user')

db.on('error', console.error.bind(console, 'MongoDB connection error:'))

const main = async () => {
  const users = [
    new User({ name: 'Benny', age: 28, status: 'active' }),
    new User({ name: 'Claire', age: 28, status: 'active' })
  ]
  const newUsers = async () => {
    await users.forEach(async user => await user.save())
  }
  await newUsers()
  console.log("Created users!")
}

const run = async () => {
  await main()
  process.exit(0)
}

run()
For some reason process.exit() runs before the users are actually saved, so nothing gets created.
If I remove process.exit(), the script creates the users but hangs.
How do I get my script to do its work and exit once it's done?

Awaiting users.forEach() doesn't do anything, because forEach returns undefined, so there's nothing to await. It iterates through the entire list, kicking off each save, and returns immediately, which lets main() resolve and process.exit() run before the .save() calls have a chance to finish.
What you can do, however, is wait for all the promises to finish by using Promise.all(). This requires you to map each user creation to a Promise, but that's what your save function is doing anyway: it returns a Promise. Here's an example of how you can do that:
function save(user) {
  // do something async here
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      console.log(`${user.name} Saved!`);
      resolve()
    }, 1500);
  });
}

const main = async () => {
  const users = [
    { name: 'Benny', age: 28, status: 'active' },
    { name: 'Claire', age: 28, status: 'active' }
  ]
  // map async processes into an array of promises
  const newUsers = users.map(user => save(user));
  // await the resolution of all promises, then proceed
  await Promise.all(newUsers);
  console.log("Created users!")
}

const run = async () => {
  await main()
  console.log("done")
}

run()
The distinct benefit of doing it this way is that the database calls happen in parallel, which makes your seeding process much faster. Conceptually, what you were trying to do would have waited for each database call to finish before starting the next one (i.e. running serially). If you need the saves to execute in order, that's a good thing, but it doesn't seem like your use case requires it; if it does, see the sketch below.
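If you do need the saves to run one at a time (say, when later documents depend on earlier ones), a plain for...of loop is the simplest way to keep things serial; here's a sketch using the same User model from the question:
const main = async () => {
  const users = [
    new User({ name: 'Benny', age: 28, status: 'active' }),
    new User({ name: 'Claire', age: 28, status: 'active' })
  ]
  // await works inside for...of: each save finishes before the next starts
  for (const user of users) {
    await user.save()
  }
  console.log("Created users!")
}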

A co-worker of mine proposed this solution, which chains the saves through reduce:
const db = require('../db')
const User = require('../models/user')

db.on('error', console.error.bind(console, 'MongoDB connection error:'))

const main = async () => {
  const users = [
    new User({ name: 'Benny', age: 28, status: 'active' }),
    new User({ name: 'Claire', age: 28, status: 'active' })
  ]
  const newUsers = async () => {
    await users.reduce(async (promise, user) => {
      // wait for the previous save before starting the next one
      await promise
      return user.save()
    }, Promise.resolve())
  }
  await newUsers()
  console.log("Created users!")
}

const run = async () => {
  await main()
  process.exit()
}

run()
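Chaining through reduce like this runs the saves strictly one after another, which is useful when insertion order matters; when it doesn't, the Promise.all() version above is simpler and runs the saves in parallel.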

Here is my solution - would love some feedback on improving it!
const db = require('../db')
const User = require('../models/user')

db.on('error', console.error.bind(console, 'MongoDB connection error:'))

const main = async () => {
  const users = [
    { name: 'Benny', age: 28, status: 'active' },
    { name: 'Claire', age: 28, status: 'active' }
  ]
  await User.insertMany(users)
  console.log("Created users!")
}

const run = async () => {
  await main()
  db.close()
}

run()
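One suggestion, since you asked for feedback: if insertMany() rejects, db.close() never runs and the script hangs again. A try/finally sketch (assuming db is the Mongoose connection exported by ../db) guarantees the connection closes either way:
const run = async () => {
  try {
    await main()
  } catch (err) {
    console.error('Seeding failed:', err)
    process.exitCode = 1
  } finally {
    // runs whether main() resolved or threw
    await db.close()
  }
}

run()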

Related

Close MongoDB connection after data insertion

I want to populate my database with some random data. I have used Faker.js to generate it. I'm using MongoDB on localhost, and all the data properly follows the validation rules from the schema. The problem is closing the connection after the data is inserted: I want to close it as soon as the data is populated. I'm using async functions to keep track of everything, but something is not going right.
Here is my code, seeds.js, which is the script I'm using to populate the database:
const path = require("path");
require("dotenv").config({ path: path.resolve(__dirname, "../.env") });

var mongoose = require("mongoose");
mongoose.connect(process.env.MONGODB_URI);

require("../models/User");
require("../models/Item");
require("../models/Comment");

var Item = mongoose.model("Item");
var Comment = mongoose.model("Comment");
var User = mongoose.model("User");

const ItemData = require("../data/item.json");
const CommentData = require("../data/comment.json");
const UserData = require("../data/user.json");

async function InsertData() {
  ItemData.forEach(async (item) => {
    item.seller = item.seller.$oid;
    const oldItem = await Item.find({ title: item.title });
    if (!oldItem.length) {
      var newItem = new Item(item);
      await newItem.save();
    } else {
      console.log(item.slug);
    }
  });
  UserData.forEach(async (user) => {
    const oldUser = await User.find({ username: user.username });
    if (!oldUser.length) {
      var newUser = new User(user);
      await newUser.save();
    } else {
      console.log(user.username);
    }
  });
  CommentData.forEach(async (comment) => {
    comment.item = comment.item.$oid;
    comment.seller = comment.seller.$oid;
    var newComment = new Comment(comment);
    const oldComment = await Comment.find({ _id: newComment.id });
    if (!oldComment.length) {
      await newComment.save();
    } else {
      console.log(comment.body);
    }
  });
}

async function cleanup() {
  await Item.deleteMany({}, () => console.log("Data Cleared Item"));
  await Comment.deleteMany({}, () => console.log("Data Cleared Comment"));
  await User.deleteMany({}, () => console.log("Data Cleared User"));
}

async function main() {
  InsertData().then(async () => {
    console.debug('Data Inserted. Closing connection.');
    await mongoose.connection.close();
  });
}

main();
Here is the stack trace of the error
/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/connection/pool.js:841
cb(new MongoError('pool destroyed'));
^
MongoError: pool destroyed
at Pool.write (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/connection/pool.js:841:8)
at _command (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/wireprotocol/command.js:120:10)
at command (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/wireprotocol/command.js:28:5)
at Object.query (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/wireprotocol/query.js:66:3)
at Server.query (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/topologies/server.js:644:16)
at FindOperation.execute (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/find.js:38:12)
at /Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/execute_operation.js:144:17
at Server.selectServer (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/topologies/server.js:832:3)
at Server.selectServer (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/topologies/topology_base.js:342:32)
at executeWithServerSelection (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/execute_operation.js:131:12)
at /Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/execute_operation.js:70:9
at maybePromise (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/utils.js:685:3)
at executeOperation (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/execute_operation.js:34:10)
at Cursor._initializeCursor (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/cursor.js:534:7)
at Cursor._initializeCursor (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/cursor.js:186:11)
at nextFunction (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/cursor.js:737:10)
error Command failed with exit code 1.
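This is the same forEach pitfall described in the first answer above: InsertData() resolves as soon as the three loops have been scheduled, so mongoose.connection.close() destroys the pool while the save() calls are still in flight. A minimal sketch of a fix, using the same models and data, is to map each record to its promise and await them all before closing:
async function InsertData() {
  // wait for every insert to finish before returning
  await Promise.all(ItemData.map(async (item) => {
    item.seller = item.seller.$oid;
    const oldItem = await Item.find({ title: item.title });
    if (!oldItem.length) {
      await new Item(item).save();
    } else {
      console.log(item.slug);
    }
  }));
  // ...and the same Promise.all pattern for UserData and CommentData
}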

Async timeout in an array of objects

I have a database with cars and a third-party API where I can update some info about them. I have written this code to run the update every x days, but the third-party database doesn't allow more than x connections per hour. So what I want is a function that makes one request every 40s, sequentially.
My database looks like
[{id: 1, name: "car1", rating: 4.6, external_db_id: 12342},
{id: 2, name: "car2", rating: 4.7, external_db_id: 99999}]
Updated code:
async () => {
  const allCars = await functionThatGetsCars()
  for (var i = 0; i < allCars.length; i++) {
    let car = allCars[i]
    let canUpdate = null
    try {
      canUpdate = await fetchExternalData(car.external_db_id)
    } catch (err) {
      console.log("Error fetching external data", err)
      canUpdate = null
    }
    if (canUpdate) {
      await new Promise((resolve, reject) => setTimeout(resolve, 40000))
      const updated = await functionThatUpdatesCarsInMyDB(
        { id: car.id },
        { rating: canUpdate ? canUpdate.external_rating : car.rating }
      )
      return updated
    }
  }
}
The problem is that right now it only updates the first item from the loop (the return statement exits the function on the first update). What would be the way to make it run sequentially with a 40s timeout? Thanks in advance!
Solved thanks to @FelixKling's suggestions:
async () => {
  const allCars = await functionThatGetsCars()
  for (var i = 0; i < allCars.length; i++) {
    let car = allCars[i]
    let canUpdate = null
    try {
      canUpdate = await fetchExternalData(car.external_db_id)
    } catch (err) {
      console.log("Error fetching external data", err)
      canUpdate = null
    }
    if (canUpdate) {
      // wait 40s before each update so the external API's rate limit isn't hit
      await new Promise((resolve, reject) => setTimeout(resolve, 40000))
      const updated = await functionThatUpdatesCarsInMyDB(
        { id: car.id },
        { rating: canUpdate ? canUpdate.external_rating : car.rating }
      )
    }
  }
}
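A small readability tweak, assuming the fixed 40-second spacing is all the external API requires: pull the timeout into a named delay helper so the loop body reads more clearly:
// resolves after ms milliseconds
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms))

// inside the loop, before each update:
await delay(40000)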

Counter not increasing in async map function

I am working with MongoDB and Node.js. I have an array of customers, and I have to create each one in the database.
const promises2 = customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOne({ type: "Customer" });
    console.log({counter});
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
    await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
  }
});
await Promise.all([...promises2]);
The issue is that the counter is not increasing every time: I get the same counter in all the created customers. What is going wrong here?
The issue is something like this one, but it doesn't have an answer.
The problem is that all the calls overlap. Since the first thing they each do is get the current counter, they all get the same counter, then try to use it. Fundamentally, you don't want to do this:
const counter = await Counter.findOne({ type: "Customer" });
// ...
await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
...because it creates a race condition: overlapping asynchronous operations can both get the same sequence value and then both issue an update to it.
You want an atomic operation for incrementing and retrieving a new ID. I don't use MongoDB, but I think the findOneAndUpdate operation can do that for you if you add the returnNewDocument option. If so, the minimal change would be to swap over to using that:
const promises2 = customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({counter});
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
});
await Promise.all([...promises2]);
...but there's no reason to create an array and then immediately copy it with [...promises2]; just use it directly:
await Promise.all(customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({counter});
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
}));
The overall operation will fail if anything fails, and only the first failure is reported back to your code (the other operations continue, succeeding or failing as the case may be). If you want to know everything that happened, which is probably useful in this case, you can use allSettled instead of all:
// Gets an array of {status, value/reason} objects
const results = await Promise.allSettled(customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({counter});
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
}));
const errors = results.filter(({status}) => status === "rejected").map(({reason}) => reason);
if (errors.length) {
  // Handle/report errors here
}
Promise.allSettled was added in ES2020, but it's easily polyfilled if needed.
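A minimal polyfill sketch, in case you're on a pre-ES2020 runtime:
if (!Promise.allSettled) {
  Promise.allSettled = (promises) =>
    Promise.all(promises.map((p) =>
      Promise.resolve(p).then(
        (value) => ({ status: "fulfilled", value }),
        (reason) => ({ status: "rejected", reason })
      )
    ));
}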
If I'm mistaken about the above use of findOneAndUpdate in some way, I'm sure MongoDB gives you a way to get those IDs without a race condition. But in the worst case, you can pre-allocate the IDs instead, something like this:
// Allocate IDs (in series)
const ids = [];
for (const customer of customers) {
  if (!customer.customerId) {
    const counter = await Counter.findOne({ type: "Customer" });
    await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
    ids.push(counter.sequence_value);
  }
}
// Create customers (in parallel); filter first so indexes line up with `ids`
const results = await Promise.allSettled(customers.filter(customer => !customer.customerId).map(async (customer, index) => {
  const customerId = ids[index];
  try {
    await Customer.create({
      customerId
    });
  } catch (e) {
    // Failed, remove the counter, but without allowing any error doing so to
    // shadow the error we're already handling
    try {
      await Counter.someDeleteMethodHere(/*...customerId...*/);
    } catch (e2) {
      // ...perhaps report `e2` here, but don't shadow `e`
    }
    throw e;
  }
}));
// Get just the errors
const errors = results.filter(({status}) => status === "rejected").map(({reason}) => reason);
if (errors.length) {
  // Handle/report errors here
}
Your map function is not returning a promise.
Try this :
// assign the mapped array, otherwise promises2 stays empty
const promises2 = customers.map((customer) => {
  return new Promise(async (resolve) => {
    if (!customer.customerId) {
      const counter = await Counter.findOne({ type: 'Customer' });
      console.log({ counter });
      const payload = {
        customerId: counter.sequence_value,
      };
      await Customer.create(payload);
      await Counter.findOneAndUpdate({ type: 'Customer' }, { $inc: { sequence_value: 1 } });
    }
    resolve();
  });
});
await Promise.all(promises2);

How to run fetch() in a loop?

I am new to Node.js and promise-based requests. I want to fetch the data from a remote server in a loop, and then create a JSON object from all the fetched data.
const fetch = require('node-fetch');

const users = [];
const ids = await fetch('https://remote-server.com/ids.json');
console.log(ids);
// [1,2,3]
ids.forEach(id => {
  var user = await fetch(`https://remote-server.com/user/${id}.json`);
  users.push(user);
});
console.log(users);
expected output
[
  {
    name: 'user 1',
    city: 'abc'
  },
  {
    name: 'user 2',
    city: 'pqr'
  },
  {
    name: 'user 3',
    city: 'xyz'
  }
]
So, to launch in parallel (note that fetch resolves to a Response, so you still need to parse the body with .json()):
const ids = await (await fetch('https://remote-server.com/ids.json')).json();
const userPromises = ids.map(id => fetch(`https://remote-server.com/user/${id}.json`).then(res => res.json()));
const users = await Promise.all(userPromises);
To launch in sequence:
const users = [];
const ids = await (await fetch('https://remote-server.com/ids.json')).json();
for (const id of ids) {
  const user = await (await fetch(`https://remote-server.com/user/${id}.json`)).json();
  users.push(user);
}
You forgot to add async in the forEach:
ids.forEach(async (id) => { // the callback is its own function, so it must be async to use await
  var user = await fetch(`https://remote-server.com/user/${id}.json`);
  users.push(user);
});
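Note that adding async only fixes the SyntaxError: forEach still won't wait for the fetches to finish, so the final console.log(users) runs before the array is populated. The for...of and Promise.all versions above avoid that.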

PostgreSQL results printing to console but not displaying in browser

I'm trying to get some data from a pg database to my API endpoint. I can print the results to the console, but I can't get them to display in the browser with res.send. I'm guessing the problem is with global and local scope; however, I've not been able to figure it out. I'm using ES6 but transpiling with Babel. Here's a snippet.
app.get('/', (request, response) => {
  const { Pool, Client } = require('pg');
  const config = {
    user: '',
    host: '',
    database: '',
    password: '',
    port: ,
  }
  const pool = new Pool(config);
  const client = new Client(config);
  let whole = [];
  client.connect();
  const text = "SELECT * FROM entries where id='1'";
  client.query(text)
    .then(res => {
      console.log(res.rows[0]);
      whole.push(res.rows[0]);
    })
    .catch(e => console.error(e.stack));
  response.send(whole);
  client.end;
});
This logs to the console
{ id: 1, title: 'First title', body: 'beautiful body' }
However the browser only displays []
This is what Babel transpiles it to, which is the script I run in Node:
var whole = [];
client.connect();
var text = "SELECT * FROM entries where id='1'";
client.query(text).then(function (res) {
  console.log(res.rows[0]);
  whole.push(res.rows[0]);
}).catch(function (e) {
  return console.error(e.stack);
});
response.send(whole);
client.end;
response.send is called outside of the .then handler, and is therefore executed before you push the row data into the array. Moving response.send into the handler should fix it. (Note also that client.end without parentheses never actually calls the method.)
client.query(text).then(res => {
  whole.push(res.rows[0]);
  client.end();
  response.send(whole);
}).catch((e) => {
  console.error(e.stack);
});
Alternatively, you can use async/await depending on your babel version and presets/plugins.
const { Client } = require("pg");
const config = {...};
const queryText = "SELECT * FROM entries where id='1'";

app.get("/", async (request, response) => {
  const client = new Client(config);
  await client.connect();
  try {
    const queryResponse = await client.query(queryText);
    // Send response without pushing to array
    response.send(queryResponse.rows[0]);
    client.end();
  } catch (e) {
    console.error(e.stack);
  }
});
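As a side note, the original snippet creates both a Pool and a Client but only uses the Client. For an Express endpoint, a shared Pool created once outside the handler is usually a better fit, since it reuses connections across requests instead of opening one per request; a sketch under the same config:
const { Pool } = require("pg");
const pool = new Pool(config); // created once, shared by all requests

app.get("/", async (request, response) => {
  try {
    // pool.query checks out a client, runs the query, and releases the client
    const queryResponse = await pool.query("SELECT * FROM entries WHERE id = $1", ["1"]);
    response.send(queryResponse.rows[0]);
  } catch (e) {
    console.error(e.stack);
    response.status(500).send("Query failed");
  }
});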
