How to wait for mongoose to finish a query? - javascript

router.post('/logout', function(req, res) {
if(req.body.newList){
var aux= JSON.parse(req.body.newList);
var aux1;
var i=0;
while(i<aux.length){
aux1=aux[i];
Task.findOne({username:aux1.username,text:aux1.text},function(err,res){
if(res){
i++;
}else{
task= new Task({username:aux1.username,text:aux1.text,state:aux1.state});
task.save(function(err){
i++;
});
}
});
}
}
Well, I have this so far. aux stores an array of JSON objects; they don't have an _id, so I need to iterate through them, saving one by one when needed. The problem is (I figure) async, because this code saves into the database only the last element of the array n times, n being the number of different elements that should have been saved** (see example). So I think the async behavior of mongoose queries is giving me the headache: the loop probably keeps iterating while waiting for the result of the query, and then saves with the last value of the array.
** for instance, if I have these values for save [{username:'x',text:'a'},{username:'x',text:'aa'},{username:'x',text:'aaa'}]..
It saves [{username:'x',text:'aaa'},{username:'x',text:'aaa'},{username:'x',text:'aaa'}]
I tried using promises, but it didn't work. Probably I used them wrong.

For such things I prefer using the caolan's async library. Simply do:
npm install async to add the module.
Then replace your code with:
// Save every task from req.body.newList that is not already in the database.
// async.each gives every element its own `aux1` binding, so the saves no
// longer all close over the last element of the array (the bug in the question).
router.post('/logout', function (req, res) {
  if (req.body.newList) {
    var aux = JSON.parse(req.body.newList); // array of {username, text, state}
    var async = require("async");
    async.each(aux, function (aux1, callback) {
      Task.findOne({
        username: aux1.username,
        text: aux1.text
      }, function (err, existing) {
        // FIX: a findOne failure used to fall through and insert anyway;
        // report it to async.each instead. (Also renamed the callback's
        // `res` parameter, which shadowed the route's response object.)
        if (err) {
          return callback(err);
        }
        if (existing) {
          return callback(); // already stored, nothing to do
        }
        // FIX: `task` was an implicit global; declare it locally.
        var task = new Task({
          username: aux1.username,
          text: aux1.text,
          state: aux1.state
        });
        // FIX: forward the save error so the final handler can see it
        // (it was silently swallowed before).
        task.save(function (err) {
          callback(err);
        });
      });
    }, function (err) {
      // if any of the query processing produced an error, err would equal that error
      if (err) {
        // One of the iterations produced an error.
        // All processing will now stop.
        console.log('A query failed to process');
      } else {
        console.log('All queries have been processed successfully');
        //do other things here
      }
    });
  }
});

Related

My Node Script Hangs after functions are finished

I'm calling three functions; after the completion of these functions I want my script to close on its own, but it just hangs.
I've tried making the functions async/promise based, closing the database after each 'mongodb' type function, and using process.exit() within a function as a callback to the last called function.
Connecting to the (local - not Atlas) Database:
// NOTE(review): the bug being asked about lives here. insertJSON() and
// insertLicenses() are asynchronous — they only *start* their work and return
// immediately — so execution falls straight through to db.close() below
// before any of the database I/O has run.
MongoClient.connect(local, {useNewUrlParser: true, useUnifiedTopology: true}, function(err, db) {
if (err) {
console.log(err)
}
else {
console.log('Connected to MongoDB...')
//Read in data from jsonfiles and store each file's contents into the database : This is where the functions are being called... within a successful connect to the MongoDB
insertJSON(db, jsonfiles, 'requests', jsonfilesSource)
insertJSON(db, issuedfiles, 'issuedLicenses', isssuedfilesSource)
insertLicenses(db)
}
// Runs before the three async calls above have finished (or even started I/O).
db.close()
})
Function 1:
// Read every JSON file in dirBuf and insert its contents into `collection`
// unless a document with the same DisplayTitle/NodeInformation/Date already
// exists.
//
// FIX: the original fired off findOne/insertOne without tracking them, so a
// caller had no way to know when the work finished (why the surrounding
// script hangs or closes the db too early). It now returns a Promise that
// resolves once every file has been processed, and waits for each insertOne
// instead of letting it float. (Also stops shadowing the `collection`
// parameter with a same-named local.)
//
// db           - connected MongoClient instance
// dirBuf       - directory to list (string or Buffer path)
// collection   - collection name within the 'license-server' db
// sourceFolder - prefix prepended to each filename when reading it
function insertJSON(db, dirBuf, collection, sourceFolder) {
  var database = db.db('license-server')
  var coll = database.collection(collection)
  return new Promise(function (resolve, reject) {
    fs.readdir(dirBuf, function (err, files) {
      if (err) {
        // Keep the original best-effort behavior: log and finish quietly.
        console.log(err.message)
        return resolve()
      }
      var pending = files.map(function (filename) {
        // NOTE(review): assumes sourceFolder ends with a path separator —
        // TODO confirm against the callers.
        var text = fs.readFileSync(sourceFolder + filename);
        var filecontents = JSON.parse(text)
        return coll.findOne({"DisplayTitle" : filecontents.DisplayTitle, "NodeInformation" : filecontents.NodeInformation, "Date": filecontents.Date})
          .then(function (result) {
            if (result) {
              console.log(`An Item could already be in the database: A file is unique if its display title, nodeinformation, and date are different.
the items display title is ${result.DisplayTitle}`)
              return
            }
            // Wait for the insert so Promise.all below really means "done".
            return coll.insertOne(filecontents).then(function () {
              console.log(`Added ${filecontents.DisplayTitle} to database`)
            })
          })
          .catch(function (error) {
            console.log(error)
          })
      })
      Promise.all(pending).then(function () { resolve() }, reject)
    })
  })
}
Function 2:
// Import license files into GridFS unless a file with the same name is
// already recorded in fs.files.
// NOTE(review): like insertJSON, this starts async findOne/stream work but
// returns nothing awaitable, so callers cannot tell when it is safe to close
// the connection — the cause of the hang described in the question.
function insertLicenses(db) {
// Set up GridFS to import .lic and .licx files into the database
var database = db.db('license-server')
var collection = database.collection('fs.files')
var bucket = new mongodb.GridFSBucket(database);
var dirBuf = Buffer.from('../license-server/private/licenses')
fs.readdir(dirBuf, function(err, files) {
if (err) {
console.log(err.message)
}
else {
files.forEach(function(filename) {
// Floating promise: nothing tracks when this findOne/upload completes.
collection.findOne({"filename": filename}).
then(function(result) {
if(result) {
// NOTE(review): `$(unknown)` looks like a scrape-mangled template
// placeholder (presumably ${filename}) — TODO confirm.
console.log(`The file $(unknown) is already in the database`)
return
}
else {
fs.createReadStream('./private/licenses/' + filename).
pipe(bucket.openUploadStream(filename)).
on('error', function(error) {
assert.ifError(error)
}).
on('finish', function() {
console.log(`Uploaded $(unknown)`)
})
}
})
})
}
})
// I tried calling db.close() here since this is the last function to be called. No luck.
}
I'm guessing it has something to do with the mongodb functions having their own way to close themselves but I couldn't seem to find what I was looking for in previous attempts to resolve this issue.
The expected result is the script closing itself; the actual result is a hanging script.
All of these database calls are asynchronous -- the result of this code running is to immediately call db.close and then do the work in insertJSON and insertLicenses. If you were to rewrite this to use async/await (and you'd need to update your other functions as well) the db.close call would close the db, and that would allow the script to exit:
await insertJSON(db, jsonfiles, 'requests', jsonfilesSource)
await insertJSON(db, issuedfiles, 'issuedLicenses', isssuedfilesSource)
await insertLicenses(db)
db.close()
https://developer.mozilla.org/en-US/docs/Learn/JavaScript/Asynchronous/Introducing
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function

Async confusion in nodejs function

I always have multiple operations in one route or endpoint. Take an example below, when a user deletes an item, I want the related file be deleted in s3 too besides deleting related collection from the database.
So is the code below ok? Does it matter if I put the first function (delete file from s3) inside the DeleteItem function?
// NOTE(review): this is the code the question asks about. The s3 deletion
// and Item.DeleteItem start concurrently — neither waits for the other —
// and res.send({result:1}) fires regardless of the s3 outcome.
router.post('/item/delete', function(req, res) {
if(req.body.dlt_item){
// NOTE(review): tempArray is built but never used; deleteObjects below is
// handed req.body.dlt_item directly.
var tempArray = [];
tempArray.push({"Key":req.body.dlt_item});
s3Bucket.deleteObjects({
Bucket: 'myS3',
Delete: {
Objects: req.body.dlt_item
}
}, function(err, data) {
// Errors are only logged; the client still receives {result:1} below.
if (err)
return console.log(err);
});
}
// Runs in parallel with the s3 call above, not after it.
Item.DeleteItem(req.body.item_id, function(err,result){
if(err){console.log(err)}
res.send({result:1});
})
});
You should organise your code like this. This will ensure that s3 deletion will start only when mongodb deletion has finished.
In your code both things happen simultaneously. this may cause issue in some cases.
If one fails and other succeeds then there will be trouble. Suppose s3 files get deleted successfully and mongo deletion fails. Then you will have many references to non existing resources.
// Delete the item from MongoDB first, and only delete the backing file from
// s3 once the database deletion has succeeded — so a failed mongo delete
// never leaves references to already-removed s3 objects.
// FIX: the original snippet never closed the `if (req.body.dlt_item)` brace
// (a syntax error) and carried an unused tempArray local; both corrected.
router.post('/item/delete', function (req, res) {
  if (req.body.dlt_item) {
    Item.DeleteItem(req.body.item_id, function (err, result) {
      if (err) {
        console.log(err)
        res.send(err);
      } else {
        //deletion from mongodb is succesful now delete from s3
        s3Bucket.deleteObjects({
          Bucket: 'myS3',
          Delete: {
            Objects: req.body.dlt_item
          }
        }, function (err, data) {
          if (err) {
            // deletion from s3 failed you should handle this case
            res.send({result:1});
            return console.log(err);
          } else {
            // successful deletion from both s3 and mongo.
            // If you do not want to wait for this then send the response before this function.
            res.send({result:1});
          }
        });
      }
    })
  }
});

Closing a Mongoose connection in a script which does not run forever

I have a JavaScript program which is supposed to run for a brief period of time, insert rows into a MongoDB database, and then exit. Here is the cut down version of the application:
var mongoose = require('mongoose');
var models = require('./models');
mongoose.connect('mongodb://localhost/test')
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', function callback() {
var row = models('testschema')({
name : 'test'
});
// save() is asynchronous: it only schedules the write and returns at once.
row.save(function (err, obj) {
if (err) {
console.log(err);
} else {
console.log('Saved item.');
}
});
// NOTE(review): this runs before the save callback above fires, so the
// connection is closed before the row reaches the database — exactly the
// failure described in the question.
console.log('Closing DB');
db.close();
});
Now the above doesn't work properly, as the item never gets into the database. My feeling is that because save() is async, the db.close() is happening first and the item never gets saved. If I move the db.close() call into the callback for save, as so:
// Working variant: the connection is closed from inside save()'s completion
// callback, i.e. only after the write has actually round-tripped.
row.save(function (err, obj) {
  if (err) console.log(err);
  else console.log('Saved meeting details');
  console.log('Closing DB');
  db.close();
});
Then it works fine. However this isn't much practical help, as it means I can only write one row before needing to close the database. My question is, how do I close the Mongoose connection properly when I am in this situation:
var mongoose = require('mongoose');
var models = require('./models');
mongoose.connect('mongodb://localhost/test')
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', function callback() {
itemsToSave.forEach(function(item) {
var row = models('testschema')({
name : item.name
});
row.save(function (err, obj) {
if (err) {
console.log(err);
} else {
console.log('Saved meeting details');
}
// Can't do this here.
// (Closing inside the callback would kill the connection after the FIRST
// save finishes, while the other saves are still in flight.)
//console.log('Closing DB');
//db.close();
});
});
// Nor here.
// (Closing after the loop runs before ANY save callback has fired, since
// the loop only schedules the writes.)
//console.log('Closing DB');
//db.close();
});
Edit: Here is the final version using C Blanchard's answer below. I should note that while it does achieve the desired result, I feel it has lost the convenience of mongoose at this point. If you are going to batch up calls to save() like this, you might as well take advantage of MongoDB's underlying bulk-insert functionality and just use that to do the insert. I will probably do this task in another language, as the async nature of node.js seems to make it nearly impossible to write elegant code for something such as "open a text file, insert each line into a database, close the connection and exit". Anyhow, without further ado, the final program:
// Batch one save task per row, run them in series with the async library,
// and close the connection only from the series' completion callback.
var mongoose = require('mongoose');
var models = require('./models');
var async = require("async");

var rowsToSave = [];

// Wrap a document in a thunk of the shape async.series expects: it performs
// the save and reports completion through `callback`.
function saveRow(item) {
  return function (callback) {
    console.log('Saving meeting details');
    item.save(callback);
  };
}

mongoose.connect('mongodb://localhost/test')
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', function callback() {
  ['name1', 'name2', 'name3', 'name4'].forEach(function (name) {
    rowsToSave.push(saveRow(models('testschema')({ name: name })));
  });
  console.log(JSON.stringify(rowsToSave));
  async.series(rowsToSave, function (err, res) {
    if (err) {
      console.log(err);
    } else {
      console.log('Saved meeting details');
    }
    console.log('Closing DB');
    db.close();
  });
});
The other approach, which in some ways is nicer when looking at the code, but is also a horrible horrible hack to get around this deficiency, is to simply guesstimate the required time for the script and then close the db after this time has elapsed:
setTimeout(function() { db.close(); }, 5000);
I wouldn't blame anyone for doing this, MongoDB & Mongoose have forced you into a terrible position.
You can manage the control flow by using the async library. By using one of the control flow methods you can call the disconnect on Mongoose once all your saves are done.
What you could do is store all your save operations inside of an array and then pass it to async.series. async.series takes two arguments. 1) An array of functions to invoke in series. 2) A function which is invoked when all the functions in argument 1 are called.
Here's a sketch of a solution which follows the method described above:
// Bring in async library
var async = require("async");
// Create an array to store your save operations
var rowsToSave = [];
// Returns a function that will save a row when invoked
// Build the row up front, then hand back a thunk that saves it and signals
// completion through `callback` — the shape async.series can drive.
var saveRowMethod = function (item) {
  var doc = models('testschema')({ name: item.name });
  return function (callback) {
    doc.save(callback);
  };
}
// Queue every save as a task, then let async.series run them one after
// another; db.close() fires only after the last save has called back.
db.once('open', function callback() {
itemsToSave.forEach(function(item) {
// Store the row to save
rowsToSave.push(saveRowMethod(item))
});
// This will invoke each save operation in rowsToSave (in series)
async.series(rowsToSave, function (error, result) {
if (error) {
// Handle an error if one of the rows fails to save
// (async.series stops at the first task that reports an error)
}
console.log('Closing DB');
db.close();
})
});
Note that the functions you pass into rowsToSave must accept and invoke a callback when the save is complete. That's how async is able to track when the operations are done.
Update: I gave it more thought after your comment. Found a neater solution which relies only on Model#create - however Mongoose does not support bulk inserts for now (Issue #723)
Model#create accepts an array of objects and will do something similar to the solution I outlined above except it does not require async.
// Insert all four rows with a single Model#create call and close the
// connection from its completion callback.
var rowsToSave = [
  { name: 'name1' },
  { name: 'name2' },
  { name: 'name3' },
  { name: 'name4' },
];
TestModel.create(rowsToSave, function (err, name1, name2, name3, name4) {
  if (err) {
    console.log(err);
  } else {
    console.log('Saved meeting details');
  }
  console.log('Closing DB');
  db.close();
});

node.js - express - res.render() : Correct format for feeding variables into the JSON parameter?

I'm learning node.js so bear with me.
I'm trying to create a node.js web application using express+jade that is basically just a line queue. (I.E. take a number, wait in line, now serving number 4...except the 4 will be a mysql table field). The page will auto-update every 5 seconds. There are three line queues handled by the page (I.E) :3000/1 :3000/2 :3000/3.
To be clear, I have the application working, but I want to make sure I am doing it correctly as opposed to just hacking it together with poor methodology.
In my index.js I have the standard setup:
// NOTE(review): these handlers pass bio()/interview()/doc() return values
// straight into res.render, which only works while those functions are
// synchronous — the answers below cover the async case.
exports.bio = function(req, res){
res.render('index', {
location: 'Biometrics',
number: bio()
});
};
exports.interview = function(req, res){
res.render('index', {
location: 'Interview',
number: interview()
});
};
exports.docs = function(req, res){
res.render('index', {
location: 'Documentation',
number: doc()
});
};
I am currently also calling the values for the "number:" JSON value from within the index.js as well.
// NOTE(review): placeholder stubs from the question — `a` is never declared
// here; presumably it stands for the value produced by the elided
// "javascript and data calls". Verify against the real implementation.
var doc = (function() {
//do javascript and data calls
return a;
});
var bio = (function() {
//do javascript and data calls
return a;
});
var interview = (function() {
//do javascript and data calls
return a;
});
My question is: What would be the recommended way to do this or am I on the right track?
This will work as long as the functions doc(), bio(), and interview() are synchronous, but most likely that won't be the case, particularly if they need to perform some database access.
If these functions were async then your code should look like this:
exports.docs = function(req, res){
// call the doc() function and render the response in the callback
doc(function(err, number) {
res.render('index', {
location: 'Documentation',
number: number
});
});
};
The doc() function will look like this:
// Async producer: the result is delivered to `callback` rather than
// returned, so callers must supply a continuation.
var doc = function (callback) {
  db.doSomething(someparams, callback);
};
Inside db.doSomething() there will be a call to your function callback(err, theValue)
The asynchonous way would be something like:
exports.docs = function(req, res) {
fetchSomeValueFromMysql(req.param('foo'), function (err, data) {
if (err) {
res.send(500, 'boom!');
return;
}
res.render('index', {
location: 'Documentation',
number: data
});
});
};
Say if you had an async operation in bio();
exports.bio = function (req, res) {
bio(function (err, data) {
if (!err && data) {
res.render('index', {
location: 'Biometrics',
number: data
});
} else {
// handle error or no data
res.render('error');
}
});
}
// Async stand-in: do the work, then deliver the outcome via cb(err, data).
// FIX: the snippet ended with `});` although no wrapping `(` was ever
// opened — a syntax error; it now closes with `};`.
var bio = function (cb) {
  //do javascript and data calls
  cb(err, data);
};
Again, there are many ways to get this working. But the above should do.

index a mongo db with javascript

I have working script which stores the html forms i have created in mongoDB. It works awesome. However, i can't search any of the data i place in mongo, because i don't have an index.
I realize I could create the index from the console, but for my system to work the way we need, I really need the index to be created when the data is stored. So, I need to place code in the JavaScript that actually creates the index (using node.js or directly).
I tried the following javascript (with node.js), but it does not appear to work.
// NOTE(review): this is the question's code; it inserts the posted form but
// the index-creation attempt near the bottom does not work.
app.post('/:db/:collection/formSubmit', function(req, res) {
var json = form2json.transform(req.rawBody);
var db = new mongo.Db(req.params.db, new mongo.Server(config.db.host, config.db.port,mongoOptions ));
db.open(function(err, db) {
db.authenticate(config.db.username, config.db.password, function () {
db.collection(req.params.collection, function(err, collection) {
collection.insert(Array.isArray(json) ? json[0] : json, function(err, docs) {
res.header('Content-Type', 'application/json');
if(req.is('application/xml')) {
// NOTE(review): `<ok>1<ok>` looks like a typo for `<ok>1</ok>` — confirm.
res.send('<ok>1<ok>')
} else {
// NOTE(review): `es.send` is presumably a typo for `res.send` — confirm.
es.send(json, 201);
}
// my attempt to create an index while posting a form follows
// NOTE(review): `db.core.ensureIndex` is mongo-shell syntax; the node
// driver exposes ensureIndex on the collection object instead (see the
// answer below), which is why this attempt does not work.
db.core.ensureIndex( { "document": 1 } )
db.close();
});
});
});
});
});
You need to call ensureIndex on the collection:
// Create the index through the driver's collection API (shell-style
// db.core.ensureIndex does not exist on the node driver), and close the
// connection only once index creation has called back.
collection.ensureIndex({ "document": 1 }, function (err, indexName) {
db.close();
});

Categories

Resources