node.js mysql insert ER_PARSE_ERROR near HANDLER RESOLVED - javascript

I have an API running in AWS Lambda, written in Node.js (6.10.3) and using the npm package mysql (2.13.0). It is deployed and managed using Serverless.
I am inserting a single row into a table on a MariaDB RDS instance using the following code:
var mysql = require('mysql');
var connection = mysql.createConnection({
  "host": process.env.CONFIG_HOST,
  "user": process.env.CONFIG_WRITE_USER,
  "password": process.env.CONFIG_WRITE_PW,
  "database": process.env.CONFIG_DB,
  "connectionLimit": 1
});

module.exports.post = (event, context, callback) => {
  var body = JSON.parse(event.body);
  var params = event.someParameter;
  var sql = getSql(body.name);
  console.log(sql);
  connection.query(sql, onQueryComplete(params, callback));
}

const onQueryComplete = function(params, callback) {
  return function(error, result) {
    if (error) {
      console.log(error);
      callback(null, 'some error response');
    }
    // do something else...
  }
}

function getSql(name) {
  return `
INSERT INTO lds_config.test_table
(
name,
name_hash
)
VALUES
(
'${name}',
MD5('${name}')
);`;
}
If I check the table, I can see that the insert has completed successfully and the new row has been added, however error is being set - meaning something has gone wrong somewhere (possibly after the insert).
The error returned by mysql (in console.log) is:
{
Error: ER_PARSE_ERROR: You have an error in your SQL syntax; check the manual that corresponds to your MariaDB server version for the right syntax to use near 'HANDLER RESOLVED _____');
// Timeout clearing if needed
' at line 2
at Query.Sequence._packetToError (...\node_modules\mysql\lib\protocol\sequences\Sequence.js:52:14)
at Query.ErrorPacket <<stack trace continues>>)
code: 'ER_PARSE_ERROR',
errno: 1064,
sqlState: '42000',
index: 0
}
This is followed by a second error, stating that callback is not a function:
Debug: internal, implementation, error
TypeError: Uncaught error: callback is not a function
Similar code is working elsewhere in the API for selects. Thanks!
Further notes:
The table is:
CREATE TABLE lds_config.test_table(id INT PRIMARY KEY AUTO_INCREMENT, name VARCHAR(500), name_hash CHAR(32));
The result from console.log(sql); is:
INSERT INTO lds_config.test_table
(
name,
name_hash
)
VALUES
(
'test01',
MD5('test01')
);
This works when I run it directly in MySQL Workbench.

Please include the generated query as well, since the first error is about MySQL syntax. As for "callback is not a function": callback is not passed as a parameter in the code below, as far as I can see.
const onQueryComplete = function(params) {
  return function(error, result, callback) {
    if (error) {
      console.log(error);
      callback(null, 'some error response');
    }
    // do something else...
  }
}
Above is the corrected one.
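For reference, here is a minimal sketch of the same handler using the mysql package's ? placeholder syntax instead of building the statement through string interpolation; the driver escapes the values itself, which avoids this class of parse error and SQL injection. It keeps the question's connection object, and the Lambda callback stays captured in the closure because the driver only passes (error, results, fields) to the query callback:

const onQueryComplete = function(params, callback) {
  // callback here is the Lambda callback captured in the closure above.
  return function(error, results) {
    if (error) {
      console.log(error);
      return callback(null, 'some error response');
    }
    // do something else...
  };
};

module.exports.post = (event, context, callback) => {
  const body = JSON.parse(event.body);
  // ? placeholders are filled from the values array and escaped by the driver.
  const sql = 'INSERT INTO lds_config.test_table (name, name_hash) VALUES (?, MD5(?))';
  connection.query(sql, [body.name, body.name], onQueryComplete(event.someParameter, callback));
};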

Related

Expo Sqlite Rollback Transaction Not Working Properly

"My question is about react-native sqlite-expo"
I'm using expo-sqlite to implement a local database in an Android app.
How do I perform an SQLite rollback transaction with the Expo library, or is there another component for it? I am trying to do the transaction with this code:
const Deletequery = `DELETE FROM Talukalist`;
const InsertQuery = `INSERT into Talukalist(State_Code, District_Code, Taluka_Code, Taluka_Name) VALUES ${string}`;
const SeleteQuery = `SELECT * FROM Talukalist`;
db.transaction(
  function (tx) {
    tx.executeSql(
      Deletequery,
      [],
      (tx, results) => {
        console.log("TalukaDelete query result callback");
      },
      (tx1, error) => {
        console.log("TalukaDelete query Error callback", error.message);
        return true;
      }
    );
    tx.executeSql(
      InsertQuery,
      [],
      (tx, results) => {
        console.log("TalukaInsert query result callback");
      },
      (tx1, error) => {
        console.log("TalukaInsert query Error callback", error.message);
        return true;
      }
    );
  },
  (error) => {
    console.log("DB Transaction Error Message :- ", error.message);
    return true;
  },
  () => {
    // console.log("Success");
    db.transaction(function (tx) {
      tx.executeSql(SeleteQuery, [], (tx, results) => {
        console.log("DB Transaction Select query Success callback");
      });
    });
  }
);
Before any error I have 6 records in my table and it works properly with the success callback.
Here is the output of the first transaction:
TalukaDelete query result callback
TalukaInsert query result callback
DB Transaction Success callback result Array [
  Object {
    "COUNT(*)": 6,
  },
]
After one successful transaction I tried to put an error into my query and check whether the rollback works properly or not.
It gives me this kind of output:
TalukaDelete query result callback
TalukaInsert query Error callback near ",": syntax error (code 1 SQLITE_ERROR): , while compiling: INSERT into Taluka,list(State_Code, District_Code, Taluka_Code, Taluka_Name) VALUES ('6','63','393','Adampur'),('6','65','6409','Alewa'),('6','58','359','Ambala'),('6','58','6406','Ambala Cantonment'),('6','67','373','Assandh'),('6','69','6457','Ateli')
DB Transaction Delete callback result near ",": syntax error (code 1 SQLITE_ERROR): , while compiling: INSERT into Taluka,list(State_Code, District_Code, Taluka_Code, Taluka_Name) VALUES ('6','63','393','Adampur'),('6','65','6409','Alewa'),('6','58','359','Ambala'),('6','58','6406','Ambala Cantonment'),('6','67','373','Assandh'),('6','69','6457','Ateli')
DB Transaction Error callback result Array [
  Object {
    "COUNT(*)": 0,
  },
]
So in my second transaction the insert gives an error, yet the delete statement still ran successfully and removed all the records, and the rollback failed to restore them. Please help me with this and share your answer with me.

Node SQLite3 - crashes when running insert on specific table

I have a SQLite database I am trying to add data to with the sqlite3 package. My query is as follows, and works in the SQLite command line.
'INSERT INTO `EVENTS`(`ID`,`EventName`,`EventSociety`,`BookerName`,`BookerEmail`,`BookerStudentID`,`BookerPhone`,`TimeStart`,`TimeEnd`,`EquipmentList`,`EventSearchYear`,`EventSearchMonth`,`EventSearchDay`) VALUES (NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);';
And I'm using this code to insert to the database in node.
db.run("begin transaction");
let sql = 'INSERT INTO `EVENTS`(`ID`,`EventName`,`EventSociety`,`BookerName`,`BookerEmail`,`BookerStudentID`,`BookerPhone`,`TimeStart`,`TimeEnd`,`EquipmentList`,`EventSearchYear`,`EventSearchMonth`,`EventSearchDay`) VALUES (NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);';
console.log(sql);
db.run(sql,(err) => {
res.send('ok');
});
db.run("commit");
Trying this in Node hard crashes with an Illegal instruction: 4. However, it only happens on two tables in my database, both with over 5 fields, and not on any of the smaller ones. Is there a character limit I'm unaware of?
To avoid the crash, we need to handle the error as shown below.
Example
Note the line db.run(sql, params, function (err) { in the example below:
let sql = `INSERT INTO Users(id,firstName,lastName,email,password,permissionLevel) VALUES (?,?,?, ?,?,?)`;
let params = [uuid4(), "fn1", "ln1", "a#a2.com", "pwd1", 0];
db.run(sql, params, function (err) {
  if (err) {
    console.error("Error: Insert failed: ", err.message);
    console.error("Error: Full error: ", err);
    return;
  }
  console.log("insert success");
});
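In addition, since db.run calls are scheduled asynchronously, the begin/insert/commit sequence from the question can be kept in order with db.serialize. A rough sketch under that assumption; the database file name and the shortened column list are placeholders, not taken from the original schema:

// Rough sketch only: db.serialize() queues the statements so they run one
// after another instead of interleaving.
const sqlite3 = require('sqlite3').verbose();
const db = new sqlite3.Database('./events.db'); // placeholder file name

db.serialize(() => {
  db.run('BEGIN TRANSACTION');
  db.run('INSERT INTO `EVENTS` (`EventName`) VALUES (?)', [null], (err) => {
    if (err) {
      // Roll back instead of letting the error escape and crash the process.
      console.error('Insert failed:', err.message);
      return db.run('ROLLBACK');
    }
    db.run('COMMIT');
  });
});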

How to solve nodejs uncaughtException: Connection already released error and MaxListenersExceededWarning?

I am building an Express server to receive a request (a dict with 10 items) from my React front end and then save the data to the database. Below is my code. I found that my code works and the query does save the records back to the DB, but on each iteration of the loop this error is returned on the server. What causes this error and the MaxListenersExceededWarning?
The request data:
{{.....}, {.....}, {.....}, {.....}, {.....}} #10 item
Code:
connection.js:
const p = mysql.createPool({
  "connectionLimit": 100,
  "host": "example.org",
  "user": "test",
  "password": "test",
  "database": "test",
  "multipleStatements": true
});

const getConnection = function(callback) {
  p.getConnection(function(err, connection) {
    callback(err, connection)
  })
};

module.exports = getConnection
routers.js
router.post('/test', (req, res) => {
  getConnection(function(err, conn) {
    if (err) {
      return res.json({ success: false, error: err })
    } else {
      const dict = req.body;
      Object.keys(dict).forEach(function(r) {
        // putting dict's value into the query
        query = "UPDATE ......;"
        conn.query(query, function (err, result, fields) {
          conn.release()
          console.log(query)
          if (err) {
            console.log("err")
            return res.json({ success: false, error: err });
          }
        });
      });
    }
  });
  return res.json({ success: true });
});
Error:
error: uncaughtException: Connection already released
Error: Connection already released
at Pool.releaseConnection (/home/node_modules/mysql/lib/Pool.js:138:13)
at PoolConnection.release (/home/node_modules/mysql/lib/PoolConnection.js:35:15)
at Query.<anonymous> (/home/routes/test.js:276:22)
at Query.<anonymous> (/home/node_modules/mysql/lib/Connection.js:526:10)
at Query._callback (/home/node_modules/mysql/lib/Connection.js:488:16)
at Query.Sequence.end (/home/node_modules/mysql/lib/protocol/sequences/Sequence.js:83:24)
at Query._handleFinalResultPacket (/home//node_modules/mysql/lib/protocol/sequences/Query.js:149:8)
at Query.OkPacket (/home//node_modules/mysql/lib/protocol/sequences/Query.js:74:10)
at Protocol._parsePacket (/home//node_modules/mysql/lib/protocol/Protocol.js:291:23)
at Parser._parsePacket (/home//node_modules/mysql/lib/protocol/Parser.js:433:10)
(node:15881) MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 finish listeners added. Use emitter.setMaxListeners() to increase limit
One connection (conn) is being retrieved from the pool and is used to launch 10 queries in the forEach loop.
When the first query finishes running, the first step of its callback is conn.release(). The connection is released.
When the second query finishes running, its callback also tries to release the connection, causing the error.
This problem might be solved in multiple ways:
Solve using a counter
In the callback of the database query, before calling conn.release, check the number of queries already processed, and only release the connection when the last item is being processed.
const dict = req.body;
// initialize counter
let itemCount = 0
  , errors = []

Object.keys(dict).forEach(function(r) {
  // putting dict's value into the query
  query = "UPDATE ......;"
  conn.query(query, function (err, result, fields) {
    console.log(query)
    if (err) {
      console.log("err")
      errors.push(err)
    }
    // check whether this is the last callback
    if (itemCount === Object.keys(dict).length - 1) {
      conn.release()
      let result = errors.length ? { success: false, error: errors } : { success: true }
      res.json(result)
    }
    // increment counter
    itemCount++
  });
});
Edit: There is also an issue with the res.json calls: inside the code in the question, res.json({ success: true }) is always executed, without waiting for the queries' execution results. The modified code sample above calls res.json only once after the execution of all queries, this is the only place where res.json should be called. This implies modifying the client-side code so that it can handle an array of errors, rather than only one error.
Solve by using a recursive function instead of a for loop.
It is not good practice to use for loops for the execution of asynchronous code; you might run into Maximum call stack size exceeded errors whenever the data volume gets too large.
Instead, create a recursive function (e.g. updateDictItem) to process one update query at a time, as sketched below. Read more about asynchronous patterns in Node.js in this article.
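A minimal sketch of that recursive pattern, assuming the same conn, dict and res from the question; buildQuery is a hypothetical stand-in for whatever code builds the UPDATE statement from one dict entry:

function updateDictItem(conn, keys, dict, errors, done) {
  // Base case: every key has been processed, release the connection exactly once.
  if (keys.length === 0) {
    conn.release();
    return done(errors);
  }
  const query = buildQuery(dict[keys[0]]); // hypothetical helper building the UPDATE
  conn.query(query, function (err) {
    if (err) {
      errors.push(err);
    }
    // Only move on to the next key after the current query has finished.
    updateDictItem(conn, keys.slice(1), dict, errors, done);
  });
}

// Inside the route handler, replacing the forEach loop:
updateDictItem(conn, Object.keys(dict), dict, [], function (errors) {
  const result = errors.length ? { success: false, error: errors } : { success: true };
  res.json(result);
});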
Other possible enhancements
Rather than firing ten separate database queries, it is worth considering grouping all the updates into a single statement (for example INSERT ... ON DUPLICATE KEY UPDATE, since MySQL has no MERGE), or otherwise running all the updates inside a TRANSACTION.
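If the updates should stand or fall together, the mysql package's beginTransaction / commit / rollback methods on the pooled connection can wrap them. A rough sketch, assuming a hypothetical runAllUpdates helper (for instance the recursive function above, minus its conn.release() call) that runs every update and collects errors:

conn.beginTransaction(function (err) {
  if (err) {
    conn.release();
    return res.json({ success: false, error: err });
  }
  runAllUpdates(conn, dict, function (errors) { // hypothetical helper, see the sketch above
    if (errors.length) {
      // Undo every update from this request, then release the connection.
      return conn.rollback(function () {
        conn.release();
        res.json({ success: false, error: errors });
      });
    }
    conn.commit(function (commitErr) {
      conn.release();
      if (commitErr) {
        return res.json({ success: false, error: commitErr });
      }
      res.json({ success: true });
    });
  });
});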

POST response error after collection.insert with NodeJS Mongo module

I've got this error when trying to POST
> process.nextTick(function() { throw err; });
> ^
>
> TypeError: first argument must be a string or Buffer
> at ServerResponse.OutgoingMessage.end (_http_outgoing.js:524:11)
The errors show that something's wrong with utils and cursor, both from the mongodb module, but what are they?
Everything works fine on GET but breaks on POST (using Postman and passing {"name":"Computer","price":2500} as text); I cannot trace which module or instance is breaking the code.
This is my conn with db:
// Our primary interface for the MongoDB instance
var MongoClient = require('mongodb').MongoClient;
// Used in order to verify correct return values
var assert = require('assert');

var connect = function (databaseName, callBack) {
  var url = 'mongodb://localhost:27017/' + databaseName;
  MongoClient.connect(url,
    function (error, database) {
      assert.equal(null, error);
      console.log("Successfully connected to MongoDB instance!");
      callBack(database);
    });
};

exports.find = function (databaseName, collectionName, query, callback) {
  connect(databaseName, function (database) {
    var collection = database.collection(collectionName);
    collection.find(query).toArray(
      // Callback method
      function (err, documents) {
        // Make sure nothing went wrong
        assert.equal(err, null);
        // Print all the documents which we found, if any
        console.log("MongoDB returned the following documents:");
        console.dir(documents)
        callback(err, documents);
        // Close the database connection to free resources
        database.close();
      })
  })
};

exports.insert = function (databaseName, collectionName, object, callback) {
  connect(databaseName, function (database) {
    var collection = database.collection(collectionName);
    collection.insert(document, {w: 1}, function (err, documents) {
      console.log("Added a new document");
      console.log(documents[0]);
      callback(err, documents[0]);
    });
  })
};

exports.remove = function (databaseName, collectionName, object, callback) {
  connect(databaseName, function (database) {
    var collection = database.collection(collectionName);
    collection.remove(object, function (err, result) {
      callback(err, result);
      database.close();
    });
  })
};
The issue is actually pretty straightforward, so I'm surprised that you're not getting a better error message.
In your code:
collection.insert(document, {w: 1}, function (err, documents) {
  console.log("Added a new document");
  console.log(documents[0]); // I expect this to log undefined
  callback(err, documents[0]);
});
The second argument passed into the collection.insert callback is actually a results object, not the documents that were inserted. So documents[0] ends up being undefined because it's not an array of documents. Thus, when you try to send undefined as a response, it fails.
If your intention is to pass back the newly created documents, you're going to have to use the result object to get the _id and attach it to the document you inserted.
As a side note, I would consider keeping a connection open to your database rather than creating a new connection every time you want to talk with Mongo.
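A minimal sketch of the insert helper along those lines; it assumes the 2.x Node.js MongoDB driver, where the inserted documents (with their generated _id) are exposed on result.ops, and it inserts the object parameter rather than the undefined document variable:

exports.insert = function (databaseName, collectionName, object, callback) {
  connect(databaseName, function (database) {
    var collection = database.collection(collectionName);
    collection.insert(object, { w: 1 }, function (err, result) {
      if (err) {
        return callback(err);
      }
      console.log("Added a new document");
      // result.ops holds the inserted documents in the 2.x driver;
      // newer drivers expose insertedIds instead.
      callback(null, result.ops[0]);
      database.close();
    });
  });
};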

Returning a response immediately with res.json

I have an express route which takes in some parameters, queries the database, and then returns some response.
I am using sequelize to query the db:
router.get('/query', function(req, res) {
  var name = req.params.name;
  var gid = req.params.gid;
  // Query the db
  models.user.find({ where: { name: name }}).then(function(user) {
    models.group.find({ where: { id: gid }}).then(function(group) {
      // if user found, return data to client
      if (user) {
        res.json({"user": user, "group": group});
      }
    });
  }).catch(function(error) {
    // catch any errors from db query
    res.status(500).json({"error": error});
  });
  // Return a server error for any other reason
  // This causes ERROR
  res.status(500).json({"error": "Something went wrong. Check your input."});
});
But I keep getting the error on the last line:
Can't set headers after they are sent
It seems like the last line is ALWAYS run, even if it finds a user (which should return data to the client and be done).
Why doesn't res.json(..) immediately return to the client when a user is found? Since headers were already set, when the last line runs, it throws that error.
You need to only conditionally return an error. The line:
res.status(500).json({"error":"Something went wrong. Check your input."});
is always getting executed. The reason for this is that the function you pass to the find method is only called later in the event loop, after the db responds. This means that by the time that callback is called, you have already set the error on the response.
You should either remove that line or decide when you want to return an error, but don't return an error every time.
Remember, JavaScript is asynchronous.
As soon as you call this function
models.user.find({ where: { name: name }})
That last line is executed:
res.status(500).json({"error":"Something went wrong. Check your input."});
It seems you are trying to cater for 2 scenarios:
Bad request data from client - i.e. no gid given
Internal server errors - i.e. error with the database
I would recommend changing your catch function to something like this:
.catch(function(error) {
  // catch any errors from db query
  if (error === "Unable to connect to database") {
    return res.status(500).json({ error: "There was an internal error" })
  }
  res.status(400).json({ "error": "Bad input, please ensure you sent all required data" });
});
Have a read up on the list of standard HTTP status codes:
http://en.wikipedia.org/wiki/List_of_HTTP_status_codes
What @bhspencer said is right: you have to remove that last line.
That line probably gets executed before any query reaches the database.
You also need to add a return in:
models.user.find({ where: { name: name }}).then(function(user) {
  models.group.find({ where: { id: gid }}).then(function(group) {
    // if user found, return data to client
    if (user) {
      res.json({"user": user, "group": group});
      return;
    }
  });
}).catch(function(error) {
  // catch any errors from db query
  res.status(500).json({"error": error});
  return;
});
Actually, res.json() does not end the execution of the Node.js code; without a return statement, the code after it keeps running.
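Putting that together, here is a sketch of the route that responds exactly once per request, still using the question's models (the 404 branch for a missing user is one possible choice, not something from the original code):

router.get('/query', function (req, res) {
  var name = req.params.name;
  var gid = req.params.gid;

  models.user.find({ where: { name: name } }).then(function (user) {
    // Returning the inner promise keeps its errors flowing into the catch below.
    return models.group.find({ where: { id: gid } }).then(function (group) {
      if (user) {
        return res.json({ "user": user, "group": group });
      }
      // Hypothetical choice: report a missing user explicitly instead of hanging.
      res.status(404).json({ "error": "User not found" });
    });
  }).catch(function (error) {
    // Nothing has been sent yet when we get here, so this is the only response.
    res.status(500).json({ "error": error });
  });
});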
