Async await sqlite in javascript - javascript

I'm looking at this tutorial, which has a library called aa-sqlite in order to replace Promises() syntax with async-await.
I'm not seeing aa-sqlite on npm. Is there another, updated syntax for async await sqlite?
Here is what I'm trying with the standard sqlite library:
const sqlite3 = require('sqlite3').verbose();
let db = new sqlite3.Database("tmp.db")
// NOTE(review): quoted question code. Per the answers below, sqlite3's
// db.run() does not produce row data (use db.all()/db.each() for rows),
// so res1 is not iterable — the TypeError reported beneath this snippet.
async function myfunc(db) {
let sql = "SELECT id id FROM TABLE LIMIT 2"
let res1 = await db.run(sql)
console.log(res1)
// `row` is an implicit global here (no let/const declaration).
for (row of res1) {
console.log(row);
}
// NOTE(review): the snippet as pasted is missing the closing `}` of myfunc.
But this yields
TypeError: res1 is not iterable
I am not expecting res1 to be an object, but instead an iterator of results. How can I async/await the results of a db.run query in ES7/ES8?

I sort of tried sqlite npm package, which implements async/await over splite3, but it is not that easy to use.
A simple way is to create a little module and promisify the main sqlite3 functions.
Here is my simple module I created for a Discord chatbot database:
const sqlite3 = require('sqlite3');
const util = require('util');

// Open the chatbot database; sqlite3 reports open failures via this callback.
let db = new sqlite3.Database('./db/chatbot.sqlite3', sqlite3.OPEN_READWRITE, (err) => {
  if (err) {
    console.error(err.message);
  }
  console.log('Connected to the chatbot database.');
});

// Promisify the callback-style API so callers can use async/await.
// NOTE: the promisified run() resolves with the callback's first result
// argument (undefined) — `this.lastID` / `this.changes` from the original
// run() callback are lost; get()/all() are lossless.
db.run = util.promisify(db.run);
db.get = util.promisify(db.get);
db.all = util.promisify(db.all);

// empty all data from db
db.clean_db = async function() {
  await db.run("delete from users");
  await db.run("delete from members");
  await db.run("delete from guilds");
  // BUG FIX: vacuum was fired without await, leaving a floating promise whose
  // rejection would be unhandled; await it like the deletes above.
  await db.run("vacuum");
}

// any kind of other function ...
// and then export your module
module.exports = db;
How to use - now you can use the module like this in your code:
const db = require('./db');
// get one user
const myUser = await db.get("select * from users where id = ?", [id]);
if (! myUser)
return console.log("User with id", id, "not found");
// get all users
const users = await db.all("select * from users");
users.map((user) => { console.log(user.id, "/", user.name); });
// ... etc ...

For me the simplest solution would be to encapsulate the operation in a Promise like so:
// Adapt the callback-based db.each() to async/await by wrapping it in a
// Promise. The promise settles on the FIRST row delivered (a promise can
// only settle once), so `res` holds a single row.
const res = await new Promise((resolve, reject) => {
  db.each('SELECT id FROM table', [], (err, row) => {
    if (err) {
      reject(err)
      // BUG FIX: return so we don't fall through and call resolve() after
      // rejecting (harmless at runtime but misleading to readers).
      return
    }
    resolve(row)
  })
})
console.log(res)
With this you'll have the row result in res outside the callback and synchronously.

Try the sqlite package, rather than the sqlite3 used in the demo. It has better support for async await.

You are not seeing the aa-sqlite package because it's not an npm package.
The author of the tutorial you are referring to simply showed how he created this small aa-sqlite helper — it's all written inside the tutorial itself, but it has never been published on npm.

Related

Unable to export db properties from nodejs module

I am trying to export database properties stored in properties file from Javascript module. By the time I read database properties file, Javascript file is already exported and data properties appear undefined wherever I use in other modules.
const Pool = require('pg').Pool;
const fs = require('fs')
const path = require('path');
// Wraps a pg connection pool built from a plain config object.
class DbConfig {
constructor(dbData) {
this.pool = new Pool({
user: dbData['user'],
host: dbData['host'],
database: dbData['database'],
password: dbData['password'],
max: 20,
port: 5432
});
}
}
// NOTE(review): question code as pasted — the ")" after the filename closes
// the readFile(...) call early, so 'utf8' and the callback are stray comma
// expressions, and getdbconf itself returns undefined (the `return dbData`
// below only returns from the callback). See the answer further down.
function getdbconf() {
const dbData = {};
fs.readFile("../../db_properties.txt"), 'utf8', (err, data) => {
if (err) {
console.error(err)
return
}
// dbData = {"user":"postgres", "password": "1234"...};
return dbData;
});
}
// getdbconf() yields undefined, so Pool receives no credentials — hence the
// undefined username/password logged below.
let db = new DbConfig(getdbconf());
let dbPool = db.pool;
console.log("dbpool : -> : ",dbPool); // username and password appear undefined
module.exports = { dbPool };
Is there a way to read data before exporting data from Javascript module?
Usually database config or any other sensitive info is read from a .env file using dotenv .
Or
you could also provide env from command line itself like
DB_HOST=127.0.0.1 node index.js
inside your index.js
console.log(process.env.DB_HOST)
Please create a new file (connection-pool.js) and paste this code:
// connection-pool.js — a single shared pg pool for the whole application.
const { Pool } = require('pg');

const pool = new Pool({
  user: 'postgresUserName',
  host: 'yourHost',
  database: 'someNameDataBase',
  password: 'postgresUserPassword',
  port: 5432,
});

// Echo the resolved options so misconfiguration is visible at startup.
console.log('connectionOptions', pool.options);

module.exports = pool;
For use it, create a new file (demo-connection.js) and paste this code:
const pool = require('./connection-pool');

// Smoke-test the shared pool: run one query, print the rows, release the pool.
pool.query('SELECT NOW();', (err, res) => {
  if (err) {
    // throw err;
    console.log('connection error');
    // BUG FIX: also close the pool on the error path, otherwise the open
    // pool keeps the Node process alive after the failure.
    pool.end();
    return;
  }
  if (res) {
    console.log(res.rows);
    pool.end();
  }
});
This is an alternative option 🙂
Exporting the result of async calls
To export values which have been obtained asynchronously, export a Promise.
const fs = require('fs/promises'); // `/promises` means no callbacks, Promise returned
// BUG FIX: removed the stray ")" after the readFile call (syntax error as pasted).
const dbDataPromise = fs.readFile('fileToRead'); // `readFile` returns a Promise now
module.exports = dbDataPromise;
Importing
When you need to use the value,
const dbDataPromise = require('./dbdata');
// NOTE(review): illustrative snippet — `async init()` method shorthand is
// only valid inside a class or object literal, not at the top level of a file.
async init() {
const dbData = await dbDataPromise;
}
//or without async, using Promise callbacks
init() {
dbDataPromise
// NOTE(review): the .then body below is prose pseudo-code, not runnable JS.
.then(dbData => the rest of your code that depends on dbData here);
}
Current code broken
Please note that your current code, as pasted above, is broken:
// Quoted from the question: the ")" after the filename ends the readFile
// call early, and `return dbData` returns only from the callback — the
// explanation follows this snippet.
function getdbconf() {
const dbData = {};
fs.readFile("../../db_properties.txt"), 'utf8', (err, data) => {
//[...] snipped for brevity
return dbData;
});
}
fs.readFile "returns" dbData, but there is nothing to return to, since you are in a callback which you did not call yourself. Function getdbconf returns nothing.
The line that says let db = new DbConfig(getdbconf()); will NOT work. It needs to be inside the callback.
The only way to avoid putting all of your code inside the callback (and "flatten" it) is to use await, or to use readFileSync
Avoiding the issue
Using environment variables
Suhas Nama's suggestion is a good one, and is common practice. Try putting the values you need in environment variables.
Using synchronous readFile
While using synchronous calls does block the event loop, it's ok to do during initialization, before your app is up and running.
This avoids the problem of having everything in a callback or having to export Promises, and is often the best solution.

js doesnt execute extern function to recieve data from db

I'm trying to retrieve some version data from my MySQL (MariaDB) server.
For better maintenance I created one connection object to handle all database queries.
However, when I query some data, it seems like the query isn't executed in time, but only later, when the first await command appears.
dbControl.js :
var mysql = require('mysql');
// Constructor-style connection wrapper — callers instantiate it with `new`
// (see dataHandler.js below).
function getConnection(){
let dbConnection = mysql.createConnection({
host: "localhost",
user: "root",
password: ""
});
dbConnection.connect(function (err) {
if (err) throw err;
});
this.get_version = function() {
let sql = 'SELECT * FROM versionControl ORDER BY id DESC LIMIT 1;'
// NOTE(review): question code — the `return result` below returns from the
// query callback, not from get_version, so get_version itself returns
// undefined; the accepted fix further down wraps the query in a Promise.
dbConnection.query(sql, function (err, result) {
if (err) throw err;
console.log("vData:", result);
return result;
});
}
}
module.exports.getConnection = getConnection;
dataHandler.js:
const browserControl = require('./browserControl');
const dbControl = require('../db/dbControl');
const dbConnection = new dbControl.getConnection();
let versionData;
// Here it should be executed -->
// NOTE(review): get_version returns undefined (its result exists only inside
// the query callback), so this assignment logs undefined below.
versionData = dbConnection.get_version();
console.log(versionData);
async function get_something(){
// Here it is executed -->
const browser = await browserControl.startBrowser();
//......
}
There is a 3th file which simply controls the program. At the moment it just executes the function get_something() like:
const originData = require('./dataHandler.js');
// NOTE(review): get_something is async, so `data` here is a pending Promise —
// await it or chain .then(), as the answer below points out.
let data = originData.get_something();
console.log(data);
P.s.: it's all running with node, thanks in advance ;_)
Your get_something() is marked as async.
Call it with await get_something() or get_something().then(console.log).
Ok, I got a solution. The SQL query function returns a promise now and I created an extra "getVersion"-async-function which is waiting for the resolve of the promise. Thus the promise waits for the db answer and the rest waits until the promise is resolved.
the dataHandler.js now looks like this:
const browserControl = require('./browserControl');
const dbControl = require('../db/dbControl');
const dbConnection = new dbControl.getConnection();

// Await the now-promisified query so the version data is logged once the
// database has actually replied.
async function getVersion() {
  const versionData = await dbConnection.get_version();
  console.log(versionData);
}
// BUG FIX: the bare getVersion() call was a floating promise — attach a
// rejection handler so a failed query doesn't become an unhandled rejection.
getVersion().catch(console.error);

async function get_something() {
  const browser = await browserControl.startBrowser();
}
and the query-function now looks like this:
// Promise-returning version of get_version: resolves with the newest
// versionControl row so callers can await it.
this.get_version = function() {
  let sql = 'SELECT * FROM versionControl.lol_scraper ORDER BY id DESC LIMIT 1;'
  return new Promise((resolve, reject) => {
    dbConnection.query(sql, function (err, result) {
      // BUG FIX: reject instead of throw — a throw inside this callback does
      // not reject the promise; it escapes as an uncatchable async error.
      if (err) return reject(err);
      console.log("vData:", result);
      resolve(result);
    })
  });
// BUG FIX: the snippet as pasted was missing this closing brace.
}
P.s.: still open for smarter or more modern solutions ;_)

MySQL NodeJS not getting latest data after write

I am having trouble figuring out the problem to an issue where when I write data (create, update, delete) then write a query to get the data after, the data that I receive back is the data prior to the write.
For example:
Let's say I have two functions createApple() and getAppleById. I have a utility function called getConnection() that gets a connection from a pool to be used for transactional queries. I have an endpoint that creates an apple and I get back to the insertId from mysql then I use that to get the apple but when I return it as a response, I get an empty object.
const createApple = async ({ type }) => {
const connection = await getConnection();
// NOTE(review): question code — a transaction is begun but never committed,
// which is exactly what the accepted answer below fixes with commit/rollback.
await connection.beginTransaction();
return await connection.query(`INSERT INTO apple (type) VALUES (?)`, [type]);
}
const getAppleById = async (appleId) => {
// Pulls a separate connection from the pool, so it can only see data the
// writing connection has committed.
const connection = await getConnection();
return await connection.query(`SELECT * FROM apple WHERE id = ?`, [appleId]);
}
router.post(`/api/apples`, async (req, res) => {
const { insertId: createdAppleId } = await createApple({ ...req.body });
const apple = await getAppleById(createdAppleId);
res.status(201).send(apple); // <-- this returns {}
});
I noticed that if I add a console.log() before sending the data back, it does get back the data, for example:
router.post(`/api/apples`, async (req, res) => {
const { insertId: createdAppleId } = await createApple({ ...req.body });
const apple = await getAppleById(createdAppleId);
// NOTE(review): presumably a timing coincidence — the extra console.log does
// not address the uncommitted transaction; the real fix is in the answer below.
console.log(apple);
res.status(201).send(apple); // <-- this now returns the newly created apple
});
Any ideas on why this may be happening? Also, is this considered a good way of getting a newly created/updated entity or would it be better to make two separate calls:
First call to create/edit the entity (a POST or PATCH call)
Second call to get the entity (a GET call)
Any help is appreciated!
Thanks!
// Answer: commit the transaction so the INSERT becomes visible to the other
// pooled connection that reads the apple back.
const createApple = async ({ type }) => {
const connection = await getConnection();
await connection.beginTransaction();
await connection.query(`INSERT INTO apple (type) VALUES (?)`, [type]);
await connection.commit();
}
I think error this function when you use transaction, you should commit or rollback transaction after finish query
This is best practice for me, I hope it useful for you
// Transactional insert: commit on success, roll back on failure.
const createApple = async ({ type }) => {
  const connection = await getConnection();
  await connection.beginTransaction();
  try {
    await connection.query(`INSERT INTO apple (type) VALUES (?)`, [type]);
    await connection.commit();
  } catch (err) {
    await connection.rollback()
    // BUG FIX: rethrow after rolling back — silently swallowing the error
    // made createApple resolve successfully even when the INSERT failed.
    throw err;
  }
}

MongoDB reusable custom javascript module

I would like to create a local Javascript module I can "require" in other files to handle all MongoDB CRUD operations.
I wrote something as:
-- dbConn.js file --
require('dotenv').config()
const MongoClient = require('mongodb').MongoClient
const ObjectID = require('mongodb').ObjectID
let _connection
// Open a new MongoClient connection.
// Resolves with the connected client, or undefined when the connection fails
// (callers below guard with `if (!client) return`).
const connectDB = async () => {
  try {
    const client = await MongoClient.connect(process.env.MONGO_DB_URI, {
      useNewUrlParser: true,
      useUnifiedTopology: true
    })
    console.log('Connected to MongoDB')
    return client
  } catch (err) {
    // BUG FIX: the catch binding is `err`; the original logged the undefined
    // name `error`, which would throw a ReferenceError instead of reporting
    // the connection failure.
    console.log(err)
  }
}
exports.findOne = async () => {
let client = await connectDB()
if (!client) {
return;
}
try {
const db = client.db("Test_DB");
const collection = db.collection('IoT_data_Coll');
const query = {}
let res = await collection.findOne(query);
return res;
} catch (err) {
console.log(err);
} finally {
client.close();
}
}
exports.findAll = async () => {
let client = await connectDB()
if (!client) {
return;
}
try {
const db = client.db("Test_DB");
const collection = db.collection('IoT_data_Coll');
const query = {}
let res = await collection.find(query).toArray();
return res;
} catch (err) {
console.log(err);
} finally {
client.close();
}
}
Then in another file (not necessary inside Express app), say
-- app.js ---
const findAll = require('./dbConn').findAll
const findOne = require('./dbConn').findOne
// NOTE(review): JSON.stringify(console.log(res)) stringifies console.log's
// undefined return value — the log itself works; the stringify is a no-op.
findAll().then(res => JSON.stringify(console.log(res)))
findOne().then(res => JSON.stringify(console.log(res)))
I wonder if it is correct?
I have to close the connection after each method/CRUD operation?
I was trying to use IIF instead of ".then", as:
// Immediately-invoked async wrapper so `await` can be used at top level;
// it needs findOne to be in scope (the "findAll is not a function" error
// mentioned below points at the import, not at this wrapper).
(async () => {
console.log(await findOne())
})()
But I receive a weird error saying that findAll is not a function.
What's wrong with it?
Thanks.
It really depends on your use case, which isn't clear — whether you are using Express or a stand-alone script, and how frequently you plan to run app.js.
Either way your code is expensive, each time you reference dbCon.js you are opening a new connection to the database.
So you can fix app.js by only requiring dbCon.js once and use it..
The best practice is to ofcourse use connection pooling https://www.compose.com/articles/connection-pooling-with-mongodb/

Node express app calling mssql is saying that Connection is closed

I have another app which uses express and routes but this new app i was slimming it down. I know the connection string stuff is correct
// NOTE(review): question fragments — getQuestions is invoked with only the
// connection, so req and res are undefined inside the handler.
script.getQuestions(connection);
script.getQuestions = function(connection,req, res){
console.log(connection);
}
I have read that some people said online to change to use a promise for async fixes this... problem is that with my function having req and res i don't know how to pass those in when i even try to refactor with a promise
"ConnectionError: Connection is closed"
"(module.js:487:32) code: 'ECONNCLOSED', name: 'ConnectionError' }"
What I call up (script) is
var sql = require('mssql');
// Runs a query on the supplied connection and sends the rows back as JSON.
exports.getQuestions = function(connection, req,res){
console.log(connection);
// NOTE(review): when the supplied connection was never opened (or was already
// closed), query() rejects with ECONNCLOSED — the error the question reports;
// the .catch below only logs it.
var request = new sql.Request(connection);
var query = 'select * from Question'
request.query(query).then(function(resultset){
res.json(resultset.recordset);
}).catch(function(err){
console.log(err);
//res.json(err)
})
}
It's a bit hard to understand what you're doing there, but here is a promise-based example of using mssql:
const sql = require('mssql')
// Promise-style mssql usage; `config` and `value` are placeholders taken from
// the package documentation.
sql.connect(config).then(pool => {
  // Query
  return pool.request()
    .input('input_parameter', sql.Int, value)
    // BUG FIX: mssql binds parameters with an "@" prefix — "#input_parameter"
    // would never be substituted (almost certainly markdown mangling of "@").
    .query('select * from mytable where id = @input_parameter')
}).then(result => {
  console.dir(result)
  // Stored procedure
  return pool.request()
    .input('input_parameter', sql.Int, value)
    .output('output_parameter', sql.VarChar(50))
    .execute('procedure_name')
}).then(result => {
  console.dir(result)
}).catch(err => {
  // ... error checks
})
// Global error events (e.g. pool-level failures) arrive here.
sql.on('error', err => {
  // ... error handler
})
source: https://www.npmjs.com/package/mssql#promises

Categories

Resources