SQLITE_MISUSE: bad parameter or other API misuse [duplicate] - javascript

I've searched for how to create a sqlite3 database with a callback in Node.js and have not been able to find any links. Can someone point me towards documentation or provide a 2-3 line code sample to achieve the following:
Create a sqlite3 database and catch an error if the creation fails for any reason.
Here is what I've tried:
let dbCreate = new sqlite3.Database("./user1.db", sqlite3.OPEN_CREATE, function(err) {
    if (!err) {
        logger.infoLog("Successfully created DB file: " + dbFileForUser + " for user: " + username);
    } else {
        logger.infoLog("Failed to create DB file: " + dbFileForUser + ". Error: " + err);
    }
});
dbHandler[username] = dbCreate;
When I execute this, I get the following error:
"Failed to create DB file: ./database/user1.db. Error: Error: SQLITE_MISUSE: bad parameter or other API misuse"
This call without a callback works just fine:
var customDB = new sqlite3.Database("./custom.db", sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE);
But with it, I will not know if I run into any errors while creating the database.

Try this:
let userDB = new sqlite3.Database("./user1.db",
    sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE,
    (err) => {
        // do your thing
    });
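Applied to the snippet from the question (logger, dbFileForUser, username, and dbHandler are the asker's own variables), the corrected call might look like this, with both flags combined; OPEN_CREATE on its own is not one of the accepted flag combinations, which appears to be what triggers the SQLITE_MISUSE error here:
let dbCreate = new sqlite3.Database("./user1.db",
    sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE,
    function(err) {
        if (!err) {
            logger.infoLog("Successfully created DB file: " + dbFileForUser + " for user: " + username);
        } else {
            logger.infoLog("Failed to create DB file: " + dbFileForUser + ". Error: " + err);
        }
    });
dbHandler[username] = dbCreate;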

@Irvin is correct: have a look at http://www.sqlitetutorial.net/sqlite-nodejs/connect/, which says that if you skip the 2nd parameter, it takes the default value sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE,
and in that case, if the database does not exist, a new database is created along with the connection.
sqlite3.OPEN_READWRITE: opens the database connection for read and write operations.
sqlite3.OPEN_CREATE: creates the database (if it does not exist) and opens a connection.
So here is the first way, where you skip the 2nd parameter and solve the problem without any extra effort:
const sqlite3 = require("sqlite3").verbose();
let db = new sqlite3.Database('./user1.db', (err) => {
    if (err) {
        console.error(err.message);
    } else {
        console.log('Connected to the user1 database.');
    }
});
db.close((err) => {
    if (err) {
        return console.error(err.message);
    }
    console.log('Close the database connection.');
});
And this is the 2nd way to connect to the database (already answered by @Irvin).
const sqlite3 = require("sqlite3").verbose();
let db = new sqlite3.Database('./user1.db',
    sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE,
    (err) => {
        if (err) {
            console.error(err.message);
        } else {
            console.log('Connected to the user1 database.');
        }
    });
db.close((err) => {
    if (err) {
        return console.error(err.message);
    }
    console.log('Close the database connection.');
});
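Once the open callback reports no error (and before calling db.close()), a quick sanity check confirms the handle is usable; a minimal sketch, where the users table is purely hypothetical:
db.run("CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY, name TEXT)", (err) => {
    // Hypothetical table, only to prove the connection accepts statements
    if (err) {
        console.error(err.message);
    } else {
        console.log('Test table is in place.');
    }
});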

Related

My Node Script Hangs after functions are finished

I'm calling three functions; after they complete I want my script to close on its own, but it just hangs.
I've tried making the functions async/promise-based, closing the database after each 'mongodb'-type function, and using process.exit() within a function as a callback to the last called function.
Connecting to the (local - not Atlas) Database:
MongoClient.connect(local, {useNewUrlParser: true, useUnifiedTopology: true}, function(err, db) {
    if (err) {
        console.log(err)
    }
    else {
        console.log('Connected to MongoDB...')
        // Read in data from json files and store each file's contents into the database:
        // this is where the functions are being called... within a successful connect to the MongoDB
        insertJSON(db, jsonfiles, 'requests', jsonfilesSource)
        insertJSON(db, issuedfiles, 'issuedLicenses', isssuedfilesSource)
        insertLicenses(db)
    }
    db.close()
})
Function 1:
function insertJSON(db, dirBuf, collection, sourceFolder) {
    var database = db.db('license-server')
    var collection = database.collection(collection)
    fs.readdir(dirBuf, function(err, files) {
        if (err) {
            console.log(err.message)
        }
        else {
            files.forEach(function(filename) {
                var text = fs.readFileSync(sourceFolder + filename);
                var filecontents = JSON.parse(text)
                //collection.insertOne(filecontents)
                collection.findOne({"DisplayTitle" : filecontents.DisplayTitle, "NodeInformation" : filecontents.NodeInformation, "Date": filecontents.Date})
                .then(function(result) {
                    if (result) {
                        console.log(`An Item could already be in the database: A file is unique if its display title, nodeinformation, and date are different.
                            the items display title is ${result.DisplayTitle}`)
                        return
                    }
                    else {
                        collection.insertOne(filecontents)
                        console.log(`Added ${filecontents.DisplayTitle} to database`)
                    }
                })
                .catch(function(error) {
                    console.log(error)
                })
            })
        }
    })
}
Function 2:
function insertLicenses(db) {
    // Set up GridFS to import .lic and .licx files into the database
    var database = db.db('license-server')
    var collection = database.collection('fs.files')
    var bucket = new mongodb.GridFSBucket(database);
    var dirBuf = Buffer.from('../license-server/private/licenses')
    fs.readdir(dirBuf, function(err, files) {
        if (err) {
            console.log(err.message)
        }
        else {
            files.forEach(function(filename) {
                collection.findOne({"filename": filename})
                .then(function(result) {
                    if (result) {
                        console.log(`The file ${filename} is already in the database`)
                        return
                    }
                    else {
                        fs.createReadStream('./private/licenses/' + filename)
                        .pipe(bucket.openUploadStream(filename))
                        .on('error', function(error) {
                            assert.ifError(error)
                        })
                        .on('finish', function() {
                            console.log(`Uploaded ${filename}`)
                        })
                    }
                })
            })
        }
    })
    // I tried calling db.close() here since this is the last function to be called. No luck.
}
I'm guessing it has something to do with the MongoDB functions having their own way of closing themselves, but I couldn't find what I was looking for in previous attempts to resolve this issue.
The expected result is the script closing itself; the actual result is a hanging script.
All of these database calls are asynchronous: the result of running this code is to immediately call db.close and then do the work in insertJSON and insertLicenses. If you were to rewrite this to use async/await (and you'd need to update your other functions as well), the db.close call would close the db, and that would allow the script to exit:
await insertJSON(db, jsonfiles, 'requests', jsonfilesSource)
await insertJSON(db, issuedfiles, 'issuedLicenses', isssuedfilesSource)
await insertLicenses(db)
db.close()
https://developer.mozilla.org/en-US/docs/Learn/JavaScript/Asynchronous/Introducing
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
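For example, insertJSON could be rewritten to return a promise the caller can await. A minimal sketch under that assumption, keeping the same arguments and queries as the question (fs.readdir is swapped for fs.readdirSync here purely for brevity):
async function insertJSON(db, dirBuf, collectionName, sourceFolder) {
    const collection = db.db('license-server').collection(collectionName)
    // Read the directory synchronously so the loop below can await each lookup/insert in turn
    for (const filename of fs.readdirSync(dirBuf)) {
        const filecontents = JSON.parse(fs.readFileSync(sourceFolder + filename))
        const existing = await collection.findOne({
            "DisplayTitle": filecontents.DisplayTitle,
            "NodeInformation": filecontents.NodeInformation,
            "Date": filecontents.Date
        })
        if (existing) {
            console.log(`An item may already be in the database: ${existing.DisplayTitle}`)
        } else {
            await collection.insertOne(filecontents)
            console.log(`Added ${filecontents.DisplayTitle} to database`)
        }
    }
}
insertLicenses would need the same treatment before the awaits in the connect callback (and the trailing db.close()) behave as intended.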

ER_PARSE_ERROR you have an error in your SQL syntax: Selecting entire table from MySQL

I looked at similar questions first, but couldn't figure out how to change my code to get it working. I'm new to Node.js and MySQL. I'm running into this error:
My code is as follows (I am connecting okay, just changed host/pw info):
var mySQLpointer, connObj;
mySQLpointer = require("mysql");
connObj = mySQLpointer.createConnection({
    host: "host",
    user: "user",
    password: "pw",
    database: "db"
});
connObj.connect(function(err) {
    if (err)
        // throw err; or use the following command
        console.log("Connection Error: " + err.stack);
    else
        // console.log("Connected to DB. :-)");
        console.log("Connection OK! ID = " + connObj.threadId);
});
let sqlStmt = "SELECT * FROM Product-Service";
connObj.query(sqlStmt,
    function(err, dataSet, fields) {
        if (err)
            throw err;
        else {
            console.log(dataSet);
        }
    }
);
connObj.end();
What I'm trying to do is display all rows and columns in my SQL table; I only have 3 rows in there:
Ideally, I'm trying to get them to display like this:
Any help would be appreciated.
You need backticks around your table name because of the -:
let sqlStmt = "SELECT * FROM `Product-Service`";

Accessing Mongo DB from within Node

I'm trying to connect to a database through Node. I've got it working with smaller databases using a Mongo URL of the form:
mongodb://[username]:[password]@db1-a0.example.net:27017/[DB-Name]
When I switched it out to use a larger DB, with a Mongo URL of the form:
mongodb://[username]:[password]@db1-a1.example.net:27017,db2.example.net:2500/[DB-Name]?replicaSet=test
it throws a 'RangeError: Maximum call stack size exceeded' error and won't connect. This URL is the only thing that has changed between the databases.
I've checked the db details and can access the database through RoboMongo / Robo 3T, so it definitely exists.
I'm trying to connect through Mongoose version ^5.2.10 using the following code:
function connect() {
    if (MONGO_URL) {
        mongoose.connect(MONGO_URL, err => {
            if (err) {
                console.log('error connecting')
                console.log(err)
            }
        })
    } else {
        mongoose.connect(`mongodb://${host}`, {
            user,
            pass,
            dbName,
            useNewUrlParser: true // deprecation warning fix
        }, err => {
            if (err) {
                console.log('error connecting')
                console.log(err)
            }
        })
    }
}
mongoose.connection.on('error', (message) => {
    console.log('connection error!') // This is logged
    console.log(message)
    process.exit()
})
mongoose.connection.on('disconnected', connect)
connect()
Looks like you are trying to use a replica set. If so, try connecting like the following:
var uri = `mongodb://${userName}:${encodeURIComponent(password)}@${clusterName}/${dbName}?ssl=true&replicaSet=${process.env.replicaSetName}&authSource=${authDB}`
var db = mongoose.connect(uri).then().catch() // whatever goes inside the then and catch blocks
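Filling in the placeholders with concrete values (the host names and replicaSet=test come from the question's URL; authSource=admin is an assumption, and ssl=true from the answer above may also be needed), a fuller sketch of that connection in Mongoose 5.x might look like:
const mongoose = require('mongoose')

// Assumed credentials and options: replace with your own cluster details
const uri = `mongodb://${userName}:${encodeURIComponent(password)}@db1-a1.example.net:27017,db2.example.net:2500/${dbName}?replicaSet=test&authSource=admin`

mongoose.connect(uri, { useNewUrlParser: true })
    .then(() => console.log('connected'))
    .catch(err => {
        console.log('error connecting')
        console.log(err)
    })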

Using Promise for parallel insert query on MySQL fails

I wrote code for running insert queries in parallel in Node.js, and I am also using Promise.js.
But the code fails and raises a "duplicate primary key entry" error.
The code is as follows,
var Promise = require("promise");
var mySql = require("mysql");
var _ = require("underscore");
var connection = mySql.createConnection({
    host: "localhost",
    user: "root",
    password: "rahul",
    database: "testDb" //schema
});
connection.connect();
function insertDept(name) {
    return new Promise(fn);
    function fn(resolve, reject) {
        getMaxDept().then(function(rows) {
            var deptId = rows[0]["DeptId"];
            deptId = (_.isNull(deptId) === true) ? 125 : deptId;
            var sql = "insert into departmentTbl values(" + deptId + ",'" + name + "')";
            console.log(sql);
            connection.query(sql, function(err, rows, fields) {
                if (err) {
                    console.log(err);
                    return reject(err);
                } else {
                    return resolve(rows);
                }
            });
        }).catch(function(error) {
            return reject(error);
        });
    } //fn
} //insertDept
function getMaxDept() {
    return new Promise(fn);
    function fn(resolve, reject) {
        var sql = "select max(deptId) + 1 as 'DeptId' from departmentTbl";
        connection.query(sql, function(err, rows, fields) {
            if (err) {
                console.log(err.stack);
                return reject(err);
            } else {
                return resolve(rows);
            }
        });
    } // fn
} //getMaxDept
function createDeptForAll() {
    var promiseObj = [];
    if (arguments.length > 0) {
        _.each(arguments, callback);
    } else {
        throw "No departments passed";
    }
    function callback(deptName) {
        promiseObj.push(insertDept(deptName))
    }
    return Promise.all(promiseObj);
} //createDeptForAll
createDeptForAll("Archiology", "Anthropology").then(function(createDepartment) {
    createDepartment.then(function(rows) {
        console.log("Rows inserted " + rows["affectedRows"]);
    }).catch(function(error) {
        console.log(error);
    }).done(function() {
        connection.end();
    });
});
When I run the above code,
the output is
rahul#rahul:~/myPractise/NodeWebApp/NodeMySqlv1.0$ node queryUsingPromise02.js
insert into departmentTbl values(125,'Archiology')
insert into departmentTbl values(125,'Anthropology')
{ [Error: ER_DUP_ENTRY: Duplicate entry '125' for key 'PRIMARY'] code: 'ER_DUP_ENTRY', errno: 1062, sqlState: '23000', index: 0 }
As the department ID is the primary key and the promises run in parallel,
the second department's insert query fails.
As you can see, before any insert query I fetch the max of department IDs + 1;
if that comes back null, I assign 125.
Now, what should I change so that the code above runs?
Should I use a "before insert" trigger to calculate the next value of the "department ID" primary key at the database level itself, or should I do something in my own Node.js code?
This issue is not restricted to Node or JavaScript; you will face it with any technology that tries to write to an SQL database in parallel. Unique id generation in a scenario like this is not trivial.
If you have the option to do so, make your id field in the database AUTO_INCREMENT; this will save you a lot of headaches in situations like this.
More about AUTO_INCREMENT.
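As a rough sketch of what that looks like in practice (the deptName column name is an assumption, since the question never shows the table definition), an AUTO_INCREMENT id lets every insert omit the id entirely:
// Sketch only: column names are assumed, adjust to the real schema
connection.query(
    "CREATE TABLE IF NOT EXISTS departmentTbl (" +
    " deptId INT NOT NULL AUTO_INCREMENT PRIMARY KEY," +
    " deptName VARCHAR(100) NOT NULL)",
    function(err) {
        if (err) return console.log(err);
        // No deptId in the insert: MySQL assigns the next value itself,
        // so parallel inserts no longer collide.
        connection.query(
            "INSERT INTO departmentTbl (deptName) VALUES (?)",
            ["Archiology"],
            function(err, result) {
                if (err) return console.log(err);
                console.log("Inserted department with id " + result.insertId);
            });
    });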
The advice on AUTO_INCREMENT looks good.
You might also consider writing a promisifier for connection.query(), allowing the remaining code to be tidied up.
So with getMaxDept() removed and a connection.queryAsync() utility in place, you might end up with something like this:
var Promise = require("promise");
var mySql = require("mysql");
var connection = mySql.createConnection({
    host: "localhost",
    user: "root",
    password: "rahul",
    database: "testDb" //schema
});
connection.connect();
// promisifier for connection.query()
connection.queryAsync = function(sql) {
    return new Promise((resolve, reject) => {
        connection.query(sql, (err, rows, fields) => {
            if (err) { reject(err); }
            else { resolve({'rows': rows, 'fields': fields}); }
        });
    });
};
function insertDept(name) {
    var sql = "insert into departmentTbl (deptName) values ('" + name + "')"; // assumed column name - needs checking against the real schema
    return connection.queryAsync(sql);
}
function createDeptForAll(departments) {
    if (departments.length > 0) {
        return Promise.all(departments.map(insertDept));
    } else {
        return Promise.reject(new Error('No departments passed'));
    }
}
createDeptForAll(['Archiology', 'Anthropology']).then((results) => {
    results.forEach((result) => {
        console.log("Rows inserted " + result.rows.affectedRows);
    });
    connection.end();
}).catch((error) => {
    console.log(error);
    connection.end();
});
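One further tweak worth considering (a sketch, not part of the original answer): connection.query() also accepts a values array, so queryAsync could pass ? placeholders through and avoid building SQL by string concatenation:
// Sketch: extend the promisifier to support parameterised queries
connection.queryAsync = function(sql, values) {
    return new Promise((resolve, reject) => {
        connection.query(sql, values, (err, rows, fields) => {
            if (err) { reject(err); }
            else { resolve({'rows': rows, 'fields': fields}); }
        });
    });
};
function insertDept(name) {
    // deptName column is still an assumption - check the real schema
    return connection.queryAsync("insert into departmentTbl (deptName) values (?)", [name]);
}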

NodeJs Cassandra driver is getting stuck

I am trying to connect to a Cassandra cluster in Node.js, but when I execute a query, prepared or not, it just gets stuck: the callback isn't called at all and the process just hangs. What could cause this? I am able to connect using DevCenter with the same host/keyspace.
var Cassandra = require('cassandra-driver');
var cassandraClient = new Cassandra.Client({ contactPoints: ['*.*.*.*'], keyspace: '*' });
cassandraClient.on('log', function(level, className, message, furtherInfo) {
    console.log('log event: %s -- %s', level, message);
});
cassandraClient.connect(function (err) {
    console.log(err)
});
console.log("Starting C* getting data");
cassandraClient.execute(query, params, {prepare: true}, function(err, result) {
    if (err) console.log(err);
    res = result;
    console.log("Done C*");
    console.log('result: ' + result.rows[0]);
    notDone = false;
});
Output:
Init Get IP Info
log event: info -- Adding host *.*.*.*:9042
Starting C* getting data
And it gets stuck there. Any idea what is causing this?
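For reference, one way to rule out ordering problems while debugging (a sketch only, reusing the calls the question already shows) is to run the query strictly after connect() reports back and to surface any connection error explicitly:
// Sketch: gate the query on a successful connect and log any error
cassandraClient.connect(function (err) {
    if (err) {
        console.log('connect failed:', err);
        return;
    }
    console.log("Starting C* getting data");
    cassandraClient.execute(query, params, { prepare: true }, function(err, result) {
        if (err) return console.log(err);
        console.log('result: ' + result.rows[0]);
    });
});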
