I wrote a Node.js script and it does not work as expected.
Here is the code that sets up the MySQL connection:
var mysql = require('mysql');
var con;
var createConnectionMYSQL;
var connectCount = 0;

(createConnectionMYSQL = function () {
    con = mysql.createPool({
        connectionLimit: 10,
        host: "*****",
        user: "*****",
        password: "********",
        database: "dbname"
    });
    console.log(++connectCount);
})();

con.query("SET SESSION wait_timeout = 120");
con.query('set names utf8');
con.on('error', function (err) {
    if (err.code === 'PROTOCOL_CONNECTION_LOST') {
        createConnectionMYSQL();
    } else {
        throw err;
    }
});
The main function returns a promise that I handle.
function getRecipes(str, page) {
    return new Promise(function (resolve, reject) {
        // unimportant code
        var sql = 'SELECT ID, Recept, MATCH (Recept) AGAINST ("+' + ingredients[0] + '*" IN BOOLEAN MODE) as REL FROM recipes WHERE (MATCH (Recept) AGAINST ("+' + ingredients[0] + '*" IN BOOLEAN MODE))>0 ORDER BY REL';
        if (page != 0) sql += ' LIMIT ' + (page * 12) + ' ,12';
        con.query(sql, function (err, result, fields) {
            if (err) return reject(err);
            // console.log(result + ' ' + sql);
            resolve(result);
        });
    });
}
But I get strange behavior from the mysql module. In getRecipes(par1, 0).then(function (results) {}) I first get an empty array, and a minute later I get the normal array with results, as if resolve() ran twice. And that's the lucky case; sometimes I get several empty arrays before the expected array of results.
I think the return in 'return reject(err)' is unnecessary, but that shouldn't explain this behaviour.
Do you have the connection part in the same promise chain that handles the query? That is, do you first check with a promise that the connection is OK, and only then run the query? If not, that might cause some problems.
Have you also checked on the database side how many requests are actually arriving?
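For illustration, here's a minimal sketch of that idea (an assumption on my part, using the pool con from the question): pool.getConnection first proves a connection can be obtained, and only then runs the query.

function queryWhenConnected(sql) {
    return new Promise(function (resolve, reject) {
        // First check that a connection can actually be obtained...
        con.getConnection(function (err, connection) {
            if (err) return reject(err); // could not connect at all
            // ...and only then run the query on that connection.
            connection.query(sql, function (err, results) {
                connection.release(); // always give the connection back to the pool
                if (err) return reject(err);
                resolve(results);
            });
        });
    });
}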
Related
In the code
var stuff_i_want = '';
stuff_i_want = get_info(parm);
And the function get_info:
function get_info(data){
    var sql = "SELECT a from b where info = data";
    connection.query(sql, function(err, results){
        if (err){
            throw err;
        }
        console.log(results[0].objid); // good
        stuff_i_want = results[0].objid; // Scope is larger than function
        console.log(stuff_i_want); // Yep. Value assigned..
    });
}
In the larger scope:
stuff_i_want = null
What am I missing regarding returning MySQL data and assigning it to a variable?
============ New code per Alex's suggestion
var parent_id = '';

function get_info(data, cb){
    var sql = "SELECT a from b where info = data";
    connection.query(sql, function(err, results){
        if (err){
            throw err;
        }
        return cb(results[0].objid);
    });
}
==== New Code in Use
get_info(parent_recording, function(result){
    parent_id = result;
    console.log("Parent ID: " + parent_id); // Data is delivered
});
However
console.log("Parent ID: " + parent_id);
in the scope outside the function shows that parent_id is still null.
You're going to need to get your head around asynchronous calls and callbacks in JavaScript; this isn't C#, PHP, etc...
Here's an example using your code:
function get_info(data, callback){
    var sql = "SELECT a from b where info = data";
    connection.query(sql, function(err, results){
        if (err){
            throw err;
        }
        console.log(results[0].objid); // good
        return callback(results[0].objid);
    });
}

// usage
var stuff_i_want = '';
get_info(parm, function(result){
    stuff_i_want = result;
    // rest of your code goes in here
});
When you call get_info, this in turn calls connection.query, which takes a callback (that's what function(err, results) is). The results are then passed to that callback, and so on.
Welcome to JavaScript callback hell...
It's easy once you get the hang of it; it just takes a bit of getting used to when you're coming from something like C#.
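If it helps, here is a tiny database-free sketch of that ordering: the log labelled 3 fires after the one labelled 2, even though it appears earlier in the source.

console.log('1: before the async call');
setTimeout(function () {
    console.log('3: inside the callback, runs last');
}, 0);
console.log('2: after the async call, runs before the callback');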
I guess what you really want to do here is return a Promise object with the results. This way you can deal with the async operation of retrieving data from the DBMS: when you have the results, you make use of the Promise's resolve function to somehow "return the value" / "resolve the promise".
Here's an example:
var getEmployeeNames = function(){
    return new Promise(function(resolve, reject){
        connection.query(
            "SELECT Name, Surname FROM Employee",
            function(err, rows){
                if(err || rows === undefined){
                    reject(err || new Error("Error: rows is undefined"));
                }else{
                    resolve(rows);
                }
            }
        );
    });
};
On the caller side, you use the then function to manage fulfillment, and the catch function to manage rejection.
Here's an example that makes use of the code above:
getEmployeeNames()
    .then(function(results){
        render(results);
    })
    .catch(function(err){
        console.log("Promise rejection error: " + err);
    });
At this point you can set up the view for your results (which are indeed returned as an array of objects):
var render = function(results){ for (var i in results) console.log(results[i].Name); };
Edit
I'm adding a basic example on how to return HTML content with the results, which is a more typical scenario for Node. Just use the then function of the promise to set the HTTP response, and open your browser at http://localhost:3001
require('http').createServer(function(req, res){
    if(req.method == 'GET'){
        if(req.url == '/'){
            res.setHeader('Content-type', 'text/html');
            getEmployeeNames()
                .then(function(results){
                    var html = "<h2>" + results.length + " employees found</h2>";
                    html += "<ul>";
                    for (var i in results) html += "<li>" + results[i].Name + " " + results[i].Surname + "</li>";
                    html += "</ul>";
                    res.end(html);
                })
                .catch(function(err){
                    console.log("Promise rejection error: " + err);
                    res.end("<h1>ERROR</h1>");
                });
        }
    }
}).listen(3001);
Five years later, I understand asynchronous operations much better.
Also, with the async/await syntax (introduced in ES2017), I refactored this particular piece of code:
const mysql = require('mysql2') // has built-in promise support
const DB = process.env.DATABASE

const conn = mysql.createConnection(DB)

async function getInfo(data){
    const sql = "SELECT a FROM b WHERE info = ?"
    // query() resolves with [rows, fields], so results[0] is the rows array
    const results = await conn.promise().query(sql, [data])
    return results[0]
}

module.exports = {
    getInfo
}
Then, wherever I need this data, I wrap the call in an async function, invoke getInfo(data) with await, and use the results as needed.
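For example, a minimal sketch of that wrapper (the module path './db' and the argument value are hypothetical):

// Hypothetical consumer of the getInfo module above.
const db = require('./db');

async function showInfo(data) {
    const rows = await db.getInfo(data); // resolves with the rows array
    console.log(rows); // use the results here, after the await
}

showInfo('some-info').catch(console.error);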
This was a situation where I was inserting new records into a child table and needed the parent record's key, based only on a name.
It was a good example for understanding the asynchronous nature of Node: I needed to wrap all the code affecting the child records inside the call that finds the parent record's id.
I was approaching this from a sequential (PHP, Java) perspective, which was all wrong.
It's easier if you send in a promise (a deferred-style object) to be resolved, e.g.:
function get_info(data, promise){
    var sql = "SELECT a from b where info = data";
    connection.query(sql, function(err, results){
        if (err){
            throw err;
        }
        console.log(results[0].objid); // good
        // `promise` is assumed to be a deferred-style object that
        // exposes a resolve method (a plain Promise instance has none).
        promise.resolve(results[0].objid);
    });
}
This way Node.js stays fast, because it's busy doing other things while your promise is waiting to be resolved.
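For completeness, since a plain Promise instance has no resolve method of its own, the caller would hand in a deferred-style object that exposes one; a minimal sketch of that caller side, under that assumption:

// Build a deferred-style object whose resolve/reject are callable from outside.
var deferred = {};
deferred.promise = new Promise(function (resolve, reject) {
    deferred.resolve = resolve;
    deferred.reject = reject;
});

get_info(parm, deferred); // get_info calls deferred.resolve(...) when done
deferred.promise.then(function (objid) {
    console.log('objid: ' + objid);
});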
I had been working toward this goal for a few weeks without any result, and I finally found a way to assign the result of any MySQL query to a variable, using await/async and promises.
You don't need to understand promises deeply in order to use this; I barely know how to use them myself.
I'm doing it with a Model class for my database, like this:
class DB {
    constructor(db) {
        this.db = db;
    }

    async getUsers() {
        let query = "SELECT * FROM asimov_users";
        return this.doQuery(query);
    }

    async getUserById(array) {
        let query = "SELECT * FROM asimov_users WHERE id = ?";
        return this.doQueryParams(query, array);
    }

    // CORE FUNCTIONS - DON'T TOUCH

    async doQuery(queryToDo) {
        let pro = new Promise((resolve, reject) => {
            let query = queryToDo;
            this.db.query(query, function (err, result) {
                if (err) return reject(err); // error handling
                resolve(result);
            });
        });
        return pro.then((val) => {
            return val;
        });
    }

    async doQueryParams(queryToDo, array) {
        let pro = new Promise((resolve, reject) => {
            let query = queryToDo;
            this.db.query(query, array, function (err, result) {
                if (err) return reject(err); // error handling
                resolve(result);
            });
        });
        return pro.then((val) => {
            return val;
        });
    }
}
Then you need to instantiate your class, passing the connection object provided by mysql to the constructor. After that, all you need to do is call one of your class methods with an await in front of it. With this, you can chain queries without worrying about scopes.
Example:
connection.connect(function(err) {
    if (err) throw err;
    let DBModel = new DB(connection);
    (async function() {
        let oneUser = await DBModel.getUserById([1]);
        let allUsers = await DBModel.getUsers();
        res.render("index.ejs", { oneUser: oneUser, allUsers: allUsers });
    })();
});
Notes:
If you need to do another query, just write a new method in your class and call it with an await inside an async function; basically, copy/paste an existing method and modify it (a hypothetical example follows these notes).
There are two "core functions" in the class, doQuery and doQueryParams. The first takes only a string as a parameter, which is basically your MySQL query. The second is used for parameterized queries and also takes an array of values.
It's worth noticing that the return value of your methods will always be an array of objects, which means you'll have to index with [0] when a query returns only one row. In the case of multiple rows, just loop over the array.
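For instance, the hypothetical lookup below follows that copy/paste pattern; the method goes inside the DB class, next to getUserById:

// Hypothetical new method, added inside the DB class.
async getUserByName(array) {
    let query = "SELECT * FROM asimov_users WHERE name = ?";
    return this.doQueryParams(query, array);
}

And since methods always resolve with an array, a single-row lookup would be read with an index, e.g. let user = (await DBModel.getUserByName(["Isaac"]))[0];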
I call getLogs() through a POST request; it fetches a list of LogFileIDs (filenames) from a DB, and then for each LogFileID it makes an additional request by calling _getLogFileUrls, which responds with a signed URL for that ID. I push each of these into a global array and return that array in the final response.
I know using setTimeout is incorrect, but the problem isn't only that: I get a different result in the array every time. What can I do to resolve this issue? How do I fix this code so that the loop only moves on to the next item once the signed URL has been stored in the global array?
function _getLogFileUrls(logFileId, callback){
    var request = require('request'),
        config = require('../../config.js');

    var fileParams = {
        fileName: 'xyzdirectory/' + logFileId
    };

    request.post({
        url: config.filesServiceUrl + 'get-logfile-urls',
        json: fileParams
    }, function(error, response, body) {
        if (!error && response.statusCode === 200) {
            callback(body);
        } else {
            res.status(400).send('Error requesting file service for logs:'); // note: res is not in scope here
        }
    }).on('error', function(err) {
        console.log('File service error for Logs: ' + err);
    });
}
function getLogs(req, res){
    if(!req.body.id){
        return res.status(400).send('Please check the params!');
    }

    var date;
    if(req.body.date){
        date = req.body.date;
    } else {
        date = new Date().toISOString().slice(0, 10);
    }

    var sqlQuery = "SELECT `LogFileID` FROM `logs_data` WHERE `EmpID` = '" + req.body.id + "' AND DATE(`Timestamp`) = '" + date + "'",
        resArray = [];

    hitThisQueryForMe(sqlQuery, res, function(rows){
        if(!rows.length) res.json(rows);

        _.each(rows, function(item){
            console.log('item: ' + item.LogFileID);
            _getLogFileUrls(item.LogFileID, function(response){
                resArray.push(response);
            });
        });

        setTimeout(function(){
            res.send(resArray);
            resArray = [];
        }, 4000);
    });
}
SQL injection alert
First of all, your code has a serious SQL injection vulnerability. Never use string concatenation to build SQL from user-provided data, or anyone will be able to read, modify and delete anything in your database. This is a very serious security issue. For more details see these answers:
cannot use backtick when using nodejs 7.3.0
How to escape mysql special characters with sockets.io/node.js/javascript
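For this particular query, a parameterized version would look roughly like this (a sketch, assuming you query the mysql connection directly; the driver escapes each ? placeholder from the values array):

var sqlQuery = "SELECT `LogFileID` FROM `logs_data` " +
               "WHERE `EmpID` = ? AND DATE(`Timestamp`) = ?";
// req.body.id and date are passed as bound values, never concatenated into the SQL
connection.query(sqlQuery, [req.body.id, date], function (err, rows) {
    // ...
});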
The answer
Now to answer your question. To handle what you're trying to do here, you should either stick to callbacks and use a good module to handle concurrency, like Async:
https://caolan.github.io/async/
Or you can use promises with a good module to help with concurrency like Q or Bluebird:
http://documentup.com/kriskowal/q/
http://bluebirdjs.com/
Additionally when working with promises you can use generator-based coroutines with tools like co or Bluebird.coroutine:
https://github.com/tj/co
http://bluebirdjs.com/docs/api/promise.coroutine.html
Or you can use ES8 async/await:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
Those are the main ways to handle cases like yours. Reinventing the wheel of concurrency handling can lead (as you can see here) to error-prone, hard-to-maintain code.
I recommend using the right tool for the job.
Use async/await
Install the asyncawait library and its dependency bluebird:
npm install asyncawait --save
npm install bluebird --save
Your edited code should look like:
const async = require('asyncawait/async');
const await = require('asyncawait/await');
const Promise = require('bluebird');
const request = require('request');
const config = require('../../config.js');

function _getLogFileUrls(logFileId) {
    return new Promise((resolve, reject) => {
        var fileParams = {
            fileName: 'xyzdirectory/' + logFileId
        };

        request.post({
            url: config.filesServiceUrl + 'get-logfile-urls',
            json: fileParams
        }, function (error, response, body) {
            if (!error && response.statusCode === 200) {
                resolve(body);
            } else {
                reject('Error requesting file service for logs:');
            }
        }).on('error', function (err) {
            console.log('File service error for Logs: ' + err);
        });
    });
}
function getLogs(req, res) {
    if (!req.body.id) {
        return res.status(400).send('Please check the params!');
    }

    var date;
    if (req.body.date) {
        date = req.body.date;
    } else {
        date = new Date().toISOString().slice(0, 10);
    }

    var sqlQuery = "SELECT `LogFileID` FROM `logs_data` WHERE `EmpID` = '" + req.body.id + "' AND DATE(`Timestamp`) = '" + date + "'";

    // The whole rows callback runs inside async() so that await() can
    // suspend it; the response is only sent after every URL has arrived.
    hitThisQueryForMe(sqlQuery, res, async(function (rows) {
        if (!rows.length) return res.json(rows);

        var resArray = [];
        _.each(rows, function (item) {
            console.log('item: ' + item.LogFileID);
            resArray.push(await(_getLogFileUrls(item.LogFileID)));
        });

        res.send(resArray);
    }));
}
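For reference, on current Node versions the same flow needs no extra package at all. Here is a minimal sketch with native async/await and Promise.all, reusing the promise-based _getLogFileUrls above (hitThisQueryForMe is the question's own helper and is assumed to accept an async callback):

hitThisQueryForMe(sqlQuery, res, async function (rows) {
    if (!rows.length) return res.json(rows);
    try {
        // Fetch all signed URLs concurrently and wait for every one of them.
        var resArray = await Promise.all(
            rows.map(function (item) {
                return _getLogFileUrls(item.LogFileID);
            })
        );
        res.send(resArray); // only runs once the array is complete
    } catch (err) {
        res.status(500).send(err);
    }
});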
SELECT statements work fine, but whenever I try an INSERT or UPDATE, the recordset and affected values are undefined. The INSERT/UPDATE works in the DB; I just can't read the returned values.
var sql = require('mssql');
var config = {...};

sql.connect(config).then(function() {
    new sql.Request().query("INSERT INTO MyTable (Name, Age) VALUES ('John', 30)").then(function(recordset, affected) {
        console.log('Recordset: ' + recordset);
        console.log('Affected: ' + affected);
    }).catch(function(err) {
        console.log('Request error: ' + err);
    });
}).catch(function(err) {
    if (err) {
        console.log('SQL Connection Error: ' + err);
    }
});
The output to console is:
Recordset: undefined
Affected: undefined
I feel like I must be missing something really simple here.
As mentioned in the comments, an INSERT statement doesn't return a recordset, so recordset is undefined. Please see this section of the docs to learn more about how to get the number of affected rows.
The problem with your code is that you're expecting affected as a second argument from the promise, but promises only resolve with a single value. Because of that, you must access the number of affected rows this way:
var sql = require('mssql');
var config = {...};

sql.connect(config).then(function() {
    var request = new sql.Request();
    request.query("INSERT INTO MyTable (Name, Age) VALUES ('John', 30)").then(function(recordset) {
        console.log('Recordset: ' + recordset);
        console.log('Affected: ' + request.rowsAffected);
    }).catch(function(err) {
        console.log('Request error: ' + err);
    });
}).catch(function(err) {
    if (err) {
        console.log('SQL Connection Error: ' + err);
    }
});
If you want id to be an output parameter:
const sql = require("mssql/msnodesqlv8");
const pool = new sql.ConnectionPool(dbConfig);
const poolConnect = pool.connect();

let query = `INSERT INTO <table>(fields) VALUES(values); SELECT @id = SCOPE_IDENTITY()`;

await poolConnect;
pool.request()
    .output("id", sql.Int)
    .query(query)
    .then(result => {
        console.log(result.output.id);
    }).catch(err => {
        console.log(err);
    });
I am using Node.js with MySQL and restify.
I have the following code which is run as part of a REST API. It works fine.
server.get('/test', function (req, res, next) {
    var query_string =
        "SELECT DATE(date_transacted) AS transaction_date, " +
        " MonthReports.tb AS MonthReports__tb " +
        " FROM monthly_reports MonthReports " +
        " WHERE ( date_transacted >= '2015-01-00' AND date_transacted <= '2015-09-00' ) ";

    connection.query(query_string, function (err, rows, fields) {
        if (err) throw err;
        res.send(rows);
    });
});
If I deliberately turn off the MySQL database and make a REST API call that runs the query, I get the error
Cannot enqueue Query after fatal error.
At this point I turn the MySQL database back on, but the Node.js process is unable to recover: the same error keeps appearing on every REST API call. The REST API server is dead.
What can be done to make the Node.js REST API server recoverable?
I am assuming you are connecting globally inside your script.
One simple way would be to create a connection per request:
server.get('/test', function (req, res, next) {
    var query_string =
        "SELECT DATE(date_transacted) AS transaction_date, " +
        " MonthReports.tb AS MonthReports__tb " +
        " FROM monthly_reports MonthReports " +
        " WHERE ( date_transacted >= '2015-01-00' AND date_transacted <= '2015-09-00' ) ";

    var connection = getConnection(function connected(err) {
        if (err) {
            // error connecting to mysql! alert user
        } else {
            connection.query(query_string, function (err, rows, fields) {
                if (err) throw err;
                res.send(rows);
            });
        }
    });
});
The above is pseudocode, as I'm not familiar with the Node MySQL library. This lets each request check whether MySQL can be connected to, at the expense of opening a connection per web request.
Another strategy could be to check err when you issue a query, and if there is an error, try to re-establish the global connection:
server.get('/test', function (req, res, next) {
    var query_string =
        "SELECT DATE(date_transacted) AS transaction_date, " +
        " MonthReports.tb AS MonthReports__tb " +
        " FROM monthly_reports MonthReports " +
        " WHERE ( date_transacted >= '2015-01-00' AND date_transacted <= '2015-09-00' ) ";

    connection.query(query_string, function (err, rows, fields) {
        if (err) {
            // Try to reconnect here instead of throwing an error and
            // stopping the node process, then reissue the query.
            return;
        }
        res.send(rows);
    });
});
This website gives a complete answer. Credit goes to the writer of this article, not me.
https://www.exratione.com/2013/01/nodejs-connections-will-end-close-and-otherwise-blow-up/
/**
 * @fileOverview A simple example module that exposes a getClient function.
 *
 * The client is replaced if it is disconnected.
 */

var mysql = require("mysql");

var client = mysql.createConnection({
    host: "127.0.0.1",
    database: "mydb",
    user: "username",
    password: "password"
});

/**
 * Set up a client to automatically replace itself if it is disconnected.
 *
 * @param {Connection} client
 *   A MySQL connection instance.
 */
function replaceClientOnDisconnect(client) {
    client.on("error", function (err) {
        if (!err.fatal) {
            return;
        }
        if (err.code !== "PROTOCOL_CONNECTION_LOST") {
            throw err;
        }

        // client.config is actually a ConnectionConfig instance, not the original
        // configuration. For most situations this is fine, but if you are doing
        // something more advanced with your connection configuration, then
        // you should check carefully as to whether this is actually going to do
        // what you think it should do.
        client = mysql.createConnection(client.config);
        replaceClientOnDisconnect(client);
        client.connect(function (error) {
            if (error) {
                // Well, we tried. The database has probably fallen over.
                // That's fairly fatal for most applications, so we might as
                // well call it a day and go home.
                //
                // For a real application something more sophisticated is
                // probably required here.
                process.exit(1);
            }
        });
    });
}

// And run this on every connection as soon as it is created.
replaceClientOnDisconnect(client);

/**
 * Every operation requiring a client should call this function, and not
 * hold on to the resulting client reference.
 *
 * @return {Connection}
 */
exports.getClient = function () {
    return client;
};
This answer was extracted from another question: nodejs mysql Error: Connection lost The server closed the connection.
The extracted code:
var mysql = require('mysql');

var db_config = {
    host: 'localhost',
    user: 'root',
    password: '',
    database: 'example'
};

var connection;

function handleDisconnect() {
    connection = mysql.createConnection(db_config); // Recreate the connection, since
                                                    // the old one cannot be reused.

    connection.connect(function(err) {              // The server is either down
        if(err) {                                   // or restarting (takes a while sometimes).
            console.log('error when connecting to db:', err);
            setTimeout(handleDisconnect, 2000);     // We introduce a delay before attempting to reconnect,
        }                                           // to avoid a hot loop, and to allow our node script to
    });                                             // process asynchronous requests in the meantime.
                                                    // If you're also serving http, display a 503 error.
    connection.on('error', function(err) {
        console.log('db error', err);
        if(err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually
            handleDisconnect();                       // lost due to either server restart, or a
        } else {                                      // connection idle timeout (the wait_timeout
            throw err;                                // server variable configures this)
        }
    });
}

handleDisconnect();
I have the following simple Node.js app, which creates a cursor from a query that sorts the collection by City and then by Temperature. After that, I iterate through the cursor and update every document holding the highest temperature for its city by adding highest: true.
var MongoClient = require('mongodb').MongoClient;

MongoClient.connect('mongodb://localhost:27017/temp', function(err, db) {
    if(err) throw err;

    var cursor = db.collection('data').find().sort({ City: 1, Temperature: -1 });
    var previous = '';

    cursor.each(function(err, doc) {
        if(err) throw err;

        if(doc == null) {
            console.dir("Closing database connection");
            return db.close();
        }

        if (previous != doc.City) {
            previous = doc.City;
            var query = { _id: doc._id };
            var operator = { '$set': { highest: true } };
            console.dir(doc.City + " is " + doc.Temperature + "; ");

            db.collection('data').update(query, operator, function(err, updated) {
                if(err) {
                    console.error('Error:', err);
                    throw err;
                }
                console.dir("Successfully updated: " + JSON.stringify(updated));
            });
        }
    });
});
The problem here is that only the first city gets updated properly; here is the output:
'Berlin is 81; '
'Successfully updated: 1'
'Paris Florida is 83; '
'Warsaw New Mexico is 57; '
'Barcelona Vermont is 57; '
'Closing database connection'
Error: { [MongoError: Connection Closed By Application] name: 'MongoError' }
Error: { [MongoError: Connection Closed By Application] name: 'MongoError' }
Error: { [MongoError: Connection Closed By Application] name: 'MongoError' }
My guess at what is happening: the cursor goes through all the documents and calls update on those with the highest temperatures:
db.collection('data').update(query, operator, function(err, updated)
but before those callbacks return, the cursor finishes iterating and this fragment of code runs, closing the connection:
if(doc == null) {
    console.dir("Closing database connection");
    return db.close();
}
After that, all updates which hadn't finished processing error out, since no DB connection is available any more.
What's the proper way of handling this, so that the connection is closed only after all documents have been updated successfully?
As Neil mentioned, we can use .stream(), but I was able to make the program execute as expected by counting the updates already processed and closing the DB connection once every document we expect to update has been updated.
In my case it was pretty simple, since I have only 4 cities in the database, so I expect only 4 documents to be updated. We could also obtain this number by running a query and counting the results, but this is good enough for me.
Here's the working code:
var MongoClient = require('mongodb').MongoClient;

MongoClient.connect('mongodb://localhost:27017/temp', function(err, db) {
    if(err) throw err;

    var cursor = db.collection('data').find().sort({ City: 1, Temperature: -1 });
    var previous = '';
    var updatedCount = 0;
    var expectedToUpdate = 4; // Hardcoded, but we might want to obtain it programmatically

    cursor.each(function(err, doc) {
        if(err) throw err;

        if(doc == null) {
            return;
        }

        if (previous != doc.City) {
            previous = doc.City;
            var query = { _id: doc._id };
            var operator = { '$set': { highest: true } };
            console.dir(doc.City + " is " + doc.Temperature + "; ");

            db.collection('data').update(query, operator, function(err, updated) {
                if(err) {
                    console.error('Error:', err);
                    throw err;
                }
                console.dir("Successfully updated: " + JSON.stringify(updated));

                updatedCount++;
                if (updatedCount == expectedToUpdate) {
                    console.dir("Updated expected number of documents. Closing db.");
                    db.close();
                }
            });
        }
    });
});
I usually find the node stream interface to be a better option for an iterator. There is a .stream() method on the cursor object for this:
var stream = db.collection('data').find().sort(
    { City: 1, Temperature: -1 }
).stream();

stream.on('data', function(data) {
    // do things with the current document,
    // but pause around things with a callback, then resume
    stream.pause();
    db.collection('data').update(query, operator, function(err, updated) {
        // resume the stream when this callback is done
        stream.resume();
    });
});

stream.on('end', function() {
    // Called when everything is complete.
    // db.close() is safe here as long as you are no longer using the connection.
    db.close();
});
In fact, from version 2.0 of the native driver, the stream interface is part of the default cursor object.
But in general, only call db.close() in one-off processing scripts. You should generally not call it at all in server-type implementations; just leave the connection open over the life-cycle of the process.
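Since the 2.0 cursor is itself a readable stream, it can be consumed directly without calling .stream(); a minimal sketch:

var cursor = db.collection('data').find().sort({ City: 1, Temperature: -1 });

cursor.on('data', function (doc) {
    // process each document; pause/resume around async work as shown above
});

cursor.on('end', function () {
    db.close(); // safe here: iteration is complete
});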