MongoDB forEach asynchronous callback - JavaScript

I am using the Request npm module to extract data from an API and insert it into fields in my Mongo database. Afghanistan, the first country in the database, is the only document that gets populated with data; every other country gets skipped.
The following code logs "beginning" and "end" sequentially for every document, and only then logs the country names. I get that this is a result of the asynchronous nature of JavaScript, but I'm not sure how it affects this code.
Shouldn't the console log:
beginning
country
end
etc...
Here is the code:
MongoClient.connect(url, function(err, db) {
  db.collection('countries').find().forEach(function(myDoc) {
    console.log('beginning');
    var code = myDoc.country.iso2;
    var options = {
      url: 'https://api.tugroup.com/v1/travelsafe/countries/' + code,
      headers: {
        'X-Auth-API-Key': '*******',
        'Content-Type': 'application/x-www-form-urlencoded'
      }
    };
    var callback = function(error, response, body) {
      console.log(myDoc.country.name);
      if (!error && response.statusCode == 200) {
        var info = JSON.parse(body);
        db.collection('countries').updateOne(
          { 'country.name': myDoc.country.name },
          { $set: {
              'safety.hasAdvisoryWarning': info.hasAdvisoryWarning,
              'safety.hasRegionalAdvisory': info.hasRegionalAdvisory,
              'safety.advisories': info.advisories,
              'safety.advisoryState': info.advisoryState,
              'safety.advisoryText': info.advisoryText,
              'safety.lawAndCulture': info.lawAndCulture,
              'safety.security': info.safety }
          },
          function(err, result) {
            console.log(err);
            db.close();
          });
      }
    };
    request(options, callback);
    console.log('end');
  });
});
The console logs:
beginning
end
beginning
end
etc...
Algeria
American Samoa
Andorra
etc...

Only the first document gets updated because you close the database connection (db.close()) in the callback of updateOne, so after the first update finishes, every subsequent operation runs against a closed MongoDB connection.
If you want your log output to look like
beginning
$country
end
then you have to move the logging into the callbacks of your requests, or issue your requests sequentially.
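For example, here is a minimal sketch that moves the logging into the request callback and defers db.close() until every request has completed. The toArray call and the done counter are illustrative additions, not part of the original post:

var MongoClient = require('mongodb').MongoClient;
var request = require('request');

// url: the same MongoDB connection string used in the question
MongoClient.connect(url, function(err, db) {
  if (err) return console.log(err);
  db.collection('countries').find().toArray(function(err, docs) {
    if (err) return console.log(err);
    var pending = docs.length; // requests still in flight
    function done() {
      if (--pending === 0) {
        db.close(); // close only after the very last update has finished
      }
    }
    docs.forEach(function(myDoc) {
      console.log('beginning');
      var options = {
        url: 'https://api.tugroup.com/v1/travelsafe/countries/' + myDoc.country.iso2,
        headers: {
          'X-Auth-API-Key': '*******',
          'Content-Type': 'application/x-www-form-urlencoded'
        }
      };
      request(options, function(error, response, body) {
        console.log(myDoc.country.name);
        if (error || response.statusCode != 200) return done();
        var info = JSON.parse(body);
        db.collection('countries').updateOne(
          { 'country.name': myDoc.country.name },
          { $set: { 'safety.advisories': info.advisories /* ...the other fields as above */ } },
          function(err, result) {
            if (err) console.log(err);
            console.log('end');
            done(); // db.close() fires only when pending reaches zero
          });
      });
    });
  });
});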

How to solve nodejs uncaughtException: Connection already released error and MaxListenersExceededWarning?

I am building an Express server to receive a request (a dict with 10 items) from my React front end and then save the data to the database. Below is my code. I found that my code works and the query does save the records to the DB, but on each iteration of the loop this error is returned on the server. What causes this error and the MaxListenersExceededWarning?
The request data:
{{.....}, {.....}, {.....}, {.....}, {.....}} # 10 items
Code:
connection.js:
const p = mysql.createPool({
  "connectionLimit": 100,
  "host": "example.org",
  "user": "test",
  "password": "test",
  "database": "test",
  "multipleStatements": true
});

const getConnection = function(callback) {
  p.getConnection(function(err, connection) {
    callback(err, connection);
  });
};

module.exports = getConnection;
routers.js:
router.post('/test', (req, res) => {
  getConnection(function(err, conn) {
    if (err) {
      return res.json({ success: false, error: err })
    } else {
      const dict = req.body;
      Object.keys(dict).forEach(function(r) {
        // putting dict's value into the query
        query = "UPDATE ......;"
        conn.query(query, function (err, result, fields) {
          conn.release()
          console.log(query)
          if (err) {
            console.log("err")
            return res.json({ success: false, error: err });
          }
        });
      });
    }
  });
  return res.json({ success: true });
});
Error:
error: uncaughtException: Connection already released
Error: Connection already released
at Pool.releaseConnection (/home/node_modules/mysql/lib/Pool.js:138:13)
at PoolConnection.release (/home/node_modules/mysql/lib/PoolConnection.js:35:15)
at Query.<anonymous> (/home/routes/test.js:276:22)
at Query.<anonymous> (/home/node_modules/mysql/lib/Connection.js:526:10)
at Query._callback (/home/node_modules/mysql/lib/Connection.js:488:16)
at Query.Sequence.end (/home/node_modules/mysql/lib/protocol/sequences/Sequence.js:83:24)
at Query._handleFinalResultPacket (/home//node_modules/mysql/lib/protocol/sequences/Query.js:149:8)
at Query.OkPacket (/home//node_modules/mysql/lib/protocol/sequences/Query.js:74:10)
at Protocol._parsePacket (/home//node_modules/mysql/lib/protocol/Protocol.js:291:23)
at Parser._parsePacket (/home//node_modules/mysql/lib/protocol/Parser.js:433:10)
(node:15881) MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 finish listeners added. Use emitter.setMaxListeners() to increase limit
One connection (conn) is retrieved from the pool and used to launch 10 queries in the forEach loop.
When the first query finishes running, the first step of its callback is conn.query's conn.release(): the connection is released.
When the second query finishes running, its callback also tries to release the connection, causing the error.
This problem can be solved in multiple ways:
Solve it using a counter
In the callback of the database query, before calling conn.release(), check the number of queries already processed, and only release the connection when the last item is being processed.
const dict = req.body;
// initialize counter
let itemCount = 0
  , errors = []
const keys = Object.keys(dict)
keys.forEach(function(r) {
  // putting dict's value into the query
  query = "UPDATE ......;"
  conn.query(query, function (err, result, fields) {
    console.log(query)
    if (err) {
      console.log("err")
      errors.push(err)
    }
    // check whether this is the last callback
    if (itemCount === keys.length - 1) {
      conn.release()
      let payload = errors.length ? { success: false, error: errors } : { success: true }
      res.json(payload)
    }
    // increment counter
    itemCount++
  });
});
Edit: There is also an issue with the res.json calls: in the code in the question, res.json({ success: true }) is always executed, without waiting for the queries' execution results. The modified code sample above calls res.json only once, after the execution of all queries; this is the only place where res.json should be called. This implies modifying the client-side code so that it can handle an array of errors, rather than only one error.
Solve it by using a recursive function instead of a loop
It is not good practice to use loops for the execution of asynchronous code; you might run into Maximum call stack size exceeded errors whenever the data volume gets too large.
Instead, create a recursive function (e.g. updateDictItem) to process one update query at a time. Read more about asynchronous patterns in Node.js in this article.
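A rough sketch of such a recursive helper, reusing the conn, dict, and res from the question (the updateDictItem name comes from the suggestion above; the parameter list and the done callback are illustrative):

function updateDictItem(conn, keys, dict, index, errors, done) {
  // all items processed: release the connection once and report back
  if (index >= keys.length) {
    conn.release();
    return done(errors);
  }
  let query = "UPDATE ......;"; // built from dict[keys[index]] as in the question
  conn.query(query, function (err) {
    if (err) errors.push(err);
    // move to the next item only after the current query has finished
    updateDictItem(conn, keys, dict, index + 1, errors, done);
  });
}

// usage inside the route handler, replacing the forEach loop:
updateDictItem(conn, Object.keys(dict), dict, 0, [], function (errors) {
  res.json(errors.length ? { success: false, error: errors } : { success: true });
});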
Other possible enhancements
Rather than firing ten separate database queries, it is worth considering grouping all the updates into one MERGE update statement, or at least running all the updates inside a TRANSACTION.
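For the transaction variant, a rough sketch with the mysql driver might look like this (the UPDATE statements are elided, as in the question, and error handling is kept minimal):

conn.beginTransaction(function (err) {
  if (err) {
    conn.release();
    return res.json({ success: false, error: err });
  }
  // run all the UPDATE statements here (e.g. via the recursive helper above,
  // without its conn.release/res.json steps), then commit once:
  conn.commit(function (err) {
    if (err) {
      return conn.rollback(function () {
        conn.release();
        res.json({ success: false, error: err });
      });
    }
    conn.release();
    res.json({ success: true });
  });
});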

NodeJs Perform Operation on Each returned MySql Result Object

I'm using Node.js to fetch data from a MySQL database. From the result object/array I want to perform a specific action (call a web service) with the data returned from MySQL. Unfortunately, whenever I iterate through the MySQL result and call the web service, only one object from the result set is sent to the web service. Here is my code.
app.get('/blt', function(req, res) {
  var sql = "SELECT dest, amt FROM dit WHERE dest != 'DEST' && amt != 'AMT'";
  pool.getConnection(function(err, conn) {
    if (err) {
      console.log("MYSQL Connection Error: " + err);
    } else {
      conn.query(sql, function(err2, result) {
        if (err2) {
          console.log("Some error: " + err2);
        } else {
          //console.log(JSON.stringify(result));
          Object.keys(result).forEach(function(key) {
            let result_ = result[key];
            let sn = Math.floor((Math.random() * 900000000) + 100000000);
            let dacc_ = result_.dest;
            console.log(dacc_);
            let blDetail = {
              "BalDto": {
                "Src": "XXXXXX",
                "Code": 1,
                "Amt": 10,
                "Dac": dacc_, // variable from the MySQL result set
                "Cmt": "Ok",
                "SN": sn
              }
            };
            soap.createClient(url, function(err, client) {
              client.addSoapHeader(AuthHeader);
              client.BalTransfer(blDetail, function(err, result) {
                if (err) {
                  console.log(err.body);
                } else {
                  console.log("done");
                }
              });
            });
          });
        }
        conn.release(function(errs) {
          if (errs) {
            console.log(errs);
          }
        });
      });
    }
  });
});
I tried to use async but I still got the same result: only one row from the MySQL result set is sent to the web service.
I want the whole result set to be sent to the web service one by one. Kindly help.
You're literally iterating through your response from MySQL, which is a list of rows, and making the soap call on every single one of them.
What you need to do is this:
Check the soap client and what it accepts.
See if it accepts an array of your BalDto objects.
Utilize const listOfBalDtos = Object.keys(result).map(...) to do your transformation (see the sketch below).
Make the request:
soap.createClient(url, function(err, client) {
  client.addSoapHeader(AuthHeader);
  client.BalTransfer(listOfBalDtos);
});
Try something like this.
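A rough sketch of the transformation step, reusing the fields from the question's blDetail object. Whether BalTransfer actually accepts an array is an assumption that has to be verified against the service's WSDL first, as noted above:

// build the whole list of BalDto objects up front instead of one per iteration
const listOfBalDtos = Object.keys(result).map(function(key) {
  let row = result[key];
  return {
    "BalDto": {
      "Src": "XXXXXX",
      "Code": 1,
      "Amt": 10,
      "Dac": row.dest, // variable from the MySQL result set
      "Cmt": "Ok",
      "SN": Math.floor((Math.random() * 900000000) + 100000000)
    }
  };
});

soap.createClient(url, function(err, client) {
  if (err) return console.log(err);
  client.addSoapHeader(AuthHeader);
  client.BalTransfer(listOfBalDtos, function(err, result) {
    if (err) {
      console.log(err.body);
    } else {
      console.log("done");
    }
  });
});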

Create a website uptime monitor in Node.js

I want to create an uptime monitor using Node.js and MongoDB. I want to run a cron job in Node.js and store the data in MongoDB. If the website's response status code is not equal to 200, it will be saved in the database. I want to make a database entry like this:
url : http://www.google.com
status_code : 500
start_time : start time
end_time : end time
I can run the cron job but am not sure how to save the downtime in the database, as I don't want to store every response. Only when the response status code is something other than 200 should it start tracking (start_time) the URL, and it should record the time when the website is back to 200 as end_time.
cron.js:
var async = require('async');
const Entry = require('../models/health.model.js');
var https = require('https');
var request = require('request');

module.exports = function getHttpsRequests () {
  Entry.find({}, function(err, entrys) {
    console.log(err);
    if (!err && entrys) {
      async.each(entrys, function(entry, callback) {
        request(entry.url, function (error, response, body) {
          entry.statuscheck = response.statusCode;
          entry.save();
          callback();
        });
      }, function (error) {
      });
    }
  });
}
health.model.js:
const mongoose = require('mongoose');

const EntrySchema = mongoose.Schema({
  url: String,
  statuscheck: String
}, {
  timestamps: true
});

module.exports = mongoose.model('Entry', EntrySchema);
I would do something like this to handle updating the database. I went ahead and used standard arrow functions, because it was easier for me that way. I put some comments in, so that should clear most questions up. It may not be the most elegant solution because I wrote it in 5 minutes, but if you follow this general logic flow, you should be much closer to your solution (it's completely untested, mind you).
var async = require('async');
const Entry = require('../models/health.model.js');
var https = require('https');
var request = require('request');

module.exports = function getHttpsRequests () {
  Entry.find({}, (err, entrys) => {
    console.log(err);
    if (!err && entrys) {
      async.each(entrys, (entry, callback) => {
        request(entry.url, (error, response, body) => {
          // first check if the url has a document in the db
          // (findOne, so we get a single document or null rather than an array;
          //  the result is named `doc` to avoid shadowing the outer `entry`)
          Entry.findOne({ url: entry.url }, (err, doc) => {
            if (err) {
              // there was an error finding the document in the db, just go to the next one
              return callback();
            }
            if (!doc) {
              // the document does not exist, so check the statusCode
              if (response.statusCode === 200) { // if the statusCode is 200, continue the loop
                callback();
              } else { // if the status code is not 200, save this to the db
                console.log("Saving object: " + entry);
                entry.status_code = response.statusCode;
                entry.start_time = new Date();
                entry.save();
                callback();
              }
            } else {
              // the document exists, so check the statusCode
              if (response.statusCode === 200) { // if the statusCode is 200, update the end_time
                doc.end_time = new Date();
                // findOneAndUpdate returns the entry after the update,
                // so we can put that in the console for easy debugging
                Entry.findOneAndUpdate({ url: doc.url }, doc, (err, object) => {
                  if (err) {
                    console.log(err);
                  } else {
                    console.log("Object saved: " + object);
                  }
                  callback();
                });
              } else { // a non-200 status on an already tracked url: nothing to update yet
                callback();
              }
            }
          });
        });
      });
    }
  });
}
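One detail worth flagging: the EntrySchema posted in the question only defines url and statuscheck, and by default Mongoose drops paths that are not declared in the schema, so for the sketch above to persist its fields the schema would need something like:

const EntrySchema = mongoose.Schema({
  url: String,
  statuscheck: String,
  status_code: String, // fields assumed by the sketch above
  start_time: Date,
  end_time: Date
}, {
  timestamps: true
});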

How to watch a change stream in MongoDB and send the updates to an Ajax call?

I'm using the new MongoDB 3.6 feature watch() in order to send database updates from the Node server to Ajax on the client side.
I created a web service queried periodically by an Ajax call. Between two successive Ajax calls, I want the second one to get all the updates that occurred in the meantime. I know that I have to resume the change stream as shown in the official documentation here: https://docs.mongodb.com/manual/changeStreams/#resume-a-change-stream. However, I didn't find out how to apply this to my specific needs. By that I mean: in which callback can I process my data and send it to the web service response?
Here is a part of my server-side code, server.js:
const pipeline = [
  {
    $match: {
      "operationType": "insert",
      "fullDocument.T": { "$exists": true }
    }
  },
  {
    $project: {
      "fullDocument.ts": 1,
      "fullDocument.T": 1
    }
  }
];

function getLiveData(handler) {
  console.log("in getLiveData");
  var liveArray = [];
  var resumeToken;
  const changeStream = dbObject.collection('status').watch(pipeline);
  changeStream.hasNext(function(err, change) {
    if (err) return console.log(err);
    expect(err).to.equal(null);
    expect(change).to.exist;
    console.log("in changeStream.hasNext");
    changeStream.next(function(err, change) {
      if (err) return console.log(err);
      expect(err).to.equal(null);
      console.log("in changeStream.next");
      resumeToken = change._id;
      expect(change._id).to.exist;
      expect(changeStream.resumeToken).to.exist;
      changeStream.close(function(err) {
        if (err) return console.log(err);
        expect(err).to.equal(null);
        console.log("in changeStream.close");
        const newChangeStream = dbObject.collection('status').watch({ resumeAfter: resumeToken });
        newChangeStream.next(function(err, next) {
          if (err) return console.log(err);
          expect(err).to.equal(null);
          expect(next).to.exist;
          console.log("in newChangeStream.next");
          //my own code
          newChangeStream.on("change", function(change) {
            console.log('in change stream, change : ', change);
            liveArray.push([change.fullDocument.ts, change.fullDocument.T]);
            var response = {
              "liveArray": liveArray
            };
            console.log("from getLiveData : ", response);
            handler(response);
          });
          //my own code
          // Since changeStream has an implicit session,
          // we need to close the changeStream for unit testing purposes
          newChangeStream.close();
        });
      });
    });
  });
}
The web service part:
app.get("/liveDataRequest", function(req, res) {
  getLiveData(function(data) {
    console.log("in handler", data);
    res.status(200).send(data);
  });
});
And here is the console log. As you can see, the part where I process my data never gets called:
in getLiveData
in changeStream.hasNext
in changeStream.next
in changeStream.close
in newChangeStream.next
in getLiveData
in changeStream.hasNext
in changeStream.next
in changeStream.close
in newChangeStream.next
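For reference, a minimal sketch of the resume pattern from the documentation linked above, assuming the same dbObject and pipeline. Note that the code above attaches its on("change") listener and then immediately calls newChangeStream.close(), which would explain why the listener never fires:

// consume events with an on("change") listener and remember the last _id,
// so a later stream can pick up where this one left off
var resumeToken;
const changeStream = dbObject.collection('status').watch(pipeline);
changeStream.on('change', function(change) {
  resumeToken = change._id;
  console.log('change : ', change);
});

// later (e.g. on the next Ajax call): resume from the saved token
const newChangeStream = dbObject.collection('status')
  .watch(pipeline, { resumeAfter: resumeToken });
newChangeStream.on('change', function(change) {
  console.log('resumed change : ', change);
});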

Done function never called after $.ajax

I'm a bit new to all this (including JavaScript callbacks and ES6). I'm using Node.js + Express + MongoDB.
I'm calling an Ajax function to update an item, and the Ajax success handler is never called.
Here is my Ajax call (called from React)
editBug : function(bug){
  console.log('about to edit bug with these values', bug);
  $.ajax({
    url: '/api/bugs',
    method: 'PUT',
    data: bug
  })
  .done((jqxhr) => {
    console.log('succcess while editing the bug');
    this.setState({successVisible : true});
  })
  .fail((jqxhr) => {
    console.log('error : ' + jqxhr);
  })
},
Here is my API function:
app.put('/api/bugs', function(req, res){
  //console.log('req',req);
  console.log('query string : ', req.query);
  console.log('query params : ', req.params);
  console.log('query body: ', req.body);
  let id = new ObjectID(req.body._id);
  req.body._id = new ObjectID(req.body._id);
  db.collection('bugs').replaceOne(
    { _id: id },
    req.body,
    function(err, result){
      assert.equal(err, null);
      console.log('Successfull replace!');
      res.status(200);
    }
  );
});
The Successfull replace! log is correctly shown on the server side, and about to edit bug with these values is correctly shown on the front end. But the succcess while editing the bug log never appears on the front end, and it seems the .done callback is never executed.
The problem is that you are not sending any response back to the browser on the Node side. Try the following snippet and you should be good to go.
Also, I'd like to point out that you should handle errors. If something goes wrong while updating the bugs, best practice is to inform the browser with a 500 status code, indicating that the intended action failed. I've added this aspect in the snippet below.
app.put('/api/bugs', function(req, res) {
  //console.log('req',req);
  console.log('query string : ', req.query);
  console.log('query params : ', req.params);
  console.log('query body: ', req.body);
  let id = new ObjectID(req.body._id);
  req.body._id = new ObjectID(req.body._id);
  db.collection('bugs').replaceOne(
    { _id: id },
    req.body,
    function(err, result) {
      if (err) {
        console.log('Failed replace');
        res.status(500).end(); // <- We set the response status code and end the request
      } else {
        assert.equal(err, null);
        console.log('Successfull replace!');
        res.status(200).end(); // <- We set the response status code and end the request
      }
    }
  );
});
Don't you need to end your response object on the Node.js side?
Try adding res.end(); or any kind of response to your response object.
Also, you can use Chrome's (or any other browser's) network tab to see how your AJAX requests actually end up, and whether they hang or finish.
