After adding a user, the new user does not show up in the list; everything works only after restarting the server. It looks as if the server is serving stale data from some cache. Maybe my code is not written properly. How do I get current data from the database?
// addFile
const sqlite3 = require("sqlite3").verbose();
const db = new sqlite3.Database("./db/dbSqlite/app.db", sqlite3.OPEN_READWRITE);
const add = (user) => {
return new Promise((res, rej) => {
db.serialize(() => {
let status = false;
db.run(`INSERT INTO users(
login,
password
) VALUES(?, ?)`, user, (err) => {
if (err) rej(status);
status = true;
res(status);
});
})
})
}
module.exports = add;
// getAll file
const sqlite3 = require("sqlite3").verbose();
const db = new sqlite3.Database("./db/dbSqlite/app.db");
const getAll = new Promise((res, rej) => {
db.all(`SELECT * from users`, (err, row) => {
if (row === undefined || err) {
res("NO_TABLE_USERS");
} else {
const stringified = JSON.stringify(row)
res(JSON.parse(stringified));
}
});
})
module.exports = getAll
// route
router.post("/add", helper.isLoggedIn, helper.isAdmin, (req, res) => {
let msg = "User created successfully!";
user.add(Object.values(req.body)
.then((state) => {
if (!state) msg = "Name already used!";
user.getAll
.then((result) => res.render("users", {
name: req.user,
users: result,
msg: msg
}));
}));
});
getAll should be a function, like you did with add. Otherwise getAll is a single promise that is created and resolved only once, so every request keeps getting the same cached result:
// addFile
const sqlite3 = require("sqlite3").verbose();
const db = new sqlite3.Database("./db/dbSqlite/app.db", sqlite3.OPEN_READWRITE);
const add = (user) => {
return new Promise((res, rej) => {
db.serialize(() => {
let status = false;
db.run(`INSERT INTO users(
login,
password
) VALUES(?, ?)`, user, (err) => {
if (err) rej(status);
status = true;
res(status);
});
})
})
}
module.exports = add;
// getAll file
const sqlite3 = require("sqlite3").verbose();
const db = new sqlite3.Database("./db/dbSqlite/app.db");
const getAll = () => {
return new Promise((res, rej) => {
db.all(`SELECT * from users`, (err, row) => {
if (row === undefined || err) {
res("NO_TABLE_USERS");
} else {
const stringified = JSON.stringify(row)
res(JSON.parse(stringified));
}
});
});
}
module.exports = getAll
// route
router.post("/add", helper.isLoggedIn, helper.isAdmin, (req, res) => {
let msg = "User created successfully!";
user.add(Object.values(req.body))
.then((state) => {
if (!state) msg = "Name already used!";
user.getAll()
.then((result) => res.render("users", {
name: req.user,
users: result,
msg: msg
}));
});
});
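To see why the original getAll kept returning the same data: a promise created at module load time runs its executor exactly once, and every later .then just receives the cached result. A small runnable sketch of that behaviour (hypothetical counter, unrelated to the database code):
// A promise created at module load time resolves once and caches its value
let counter = 0;

const once = new Promise((resolve) => resolve(++counter));        // like the old getAll
const fresh = () => new Promise((resolve) => resolve(++counter)); // like the fixed getAll()

once.then(console.log);    // 1
once.then(console.log);    // 1 again: the cached resolution, never re-run
fresh().then(console.log); // 2: a new executor (a new query) runs on each call
fresh().then(console.log); // 3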
I have the following code to create a connection to my MongoDB database, and to store it for future use.
const mongodb = require('mongodb');
const MongoClient = mongodb.MongoClient;
// The database will be defined once a connection between the cluster and MongoDB is created
let _database;
const uri = '';
const databaseName = 'db';
const mongoConnect = () => {
MongoClient.connect(uri)
.then((client) => {
_database = client.db(databaseName);
})
.catch((err) => {
console.log(err);
throw err;
});
};
const getDb = () => {
if (_database) {
return _database;
}
throw 'No database found!';
};
module.exports = {
mongoConnect,
getDb
}
My problem is that _database is undefined until the connection is made. If my website tries to use the database before _database is defined, it will throw an error and crash.
I want other parts of my code to simply wait until _database is defined instead of crashing. It sounds like an async/await solution is needed, but I can't wrap my head around how to implement something like that here. Any advice would be great!
First approach: make mongoConnect an async function and await it before any of the remaining code is executed.
const mongoConnect = async () => {
try {
const client = await MongoClient.connect(uri);
_database = client.db(databaseName);
} catch (err) {
console.log(err);
throw err;
}
};
At the beginning of your code (note that top-level await only works inside an ES module; otherwise wrap the call in an async function):
await mongoConnect();
// Remaining code here
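If you are not in an ES module, one common pattern is to do the whole startup inside an async function. A minimal sketch, assuming an Express app and that the module above lives at './database' (both names are assumptions for illustration):
const express = require('express');
const { mongoConnect, getDb } = require('./database');

const startServer = async () => {
  // Wait for the connection before the app starts handling requests
  await mongoConnect();

  const app = express();
  app.get('/ping', (req, res) => {
    getDb(); // safe now: the connection already exists, so this no longer throws
    res.send('database is ready');
  });
  app.listen(3000, () => console.log('Listening on port 3000'));
};

startServer().catch((err) => {
  console.error('Startup failed:', err);
  process.exit(1);
});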
Second approach: make the getDb function await until the database connection is available.
const mongodb = require('mongodb');
const MongoClient = mongodb.MongoClient;
const uri = '';
const databaseName = 'db';
const databasePromise = new Promise((resolve, reject) => {
MongoClient.connect(uri)
.then((client) => {
resolve(client.db(databaseName));
})
.catch((err) => {
reject(err);
});
})
const getDb = async () => {
return await databasePromise;
};
module.exports = {
getDb
}
Sample code for you to run and check the second approach:
const databasePromise = new Promise((resolve) => {
console.log("Connecting to db in 5 seconds...")
setTimeout(() => {
console.log("Done")
resolve("done")
}, 5000)
})
const getDb = async () => {
return await databasePromise;
};
console.time("First_getDb_call")
getDb().then(res => {
console.timeEnd("First_getDb_call")
console.log(res)
console.time("Second_getDb_call")
getDb().then(res => {
console.timeEnd("Second_getDb_call")
console.log(res)
})
})
**Pretty simple approach:** just add await before the MongoClient.connect call and make the function async. Now it will wait for the connection response before moving forward.
const mongodb = require('mongodb');
const MongoClient = mongodb.MongoClient;
// The database will be defined once a connection between the cluster and MongoDB is created
let _database;
const uri = 'mongodb://localhost:27017/mydb';
const databaseName = 'db';
const mongoConnect = async () => {
await MongoClient.connect(uri)
.then((client) => {
_database = client.db(databaseName);
})
.catch((err) => {
console.log(err);
throw err;
});
};
const getDb = () => {
if (_database) {
return _database;
}
throw 'No database found!';
};
module.exports = {
mongoConnect,
getDb
}
I have a Cloud Function written in Node.js that reads data from BigQuery, converts it to CSV, and exports it to a Google Cloud Storage bucket.
Currently it executes and returns a 200, but does not run any of the code within my try/catch.
When testing it just returns:
Function execution took x ms. Finished with status code: 200
I've attempted to debug by adding console logs at various points, but it doesn't log anything - it just returns a 200.
exports.run_checks = (req, res) => {
"use strict";
let parsedBody = req.body;
let startCount = parsedBody.start;
let endCount = parsedBody.end;
(async function () {
try {
for (let i = startCount; i < endCount; i += 1) {
//Exclude overly large files here
if (i != 100) {
const query =
`SELECT *
FROM \`bq_dataset\`
WHERE id_number = ${i}`;
const options = {
query: query,
location: "europe-west2",
};
const [job] = await bigquery.createQueryJob(options);
console.log(`Job ${job.id} started.`);
const [rows] = await job.getQueryResults();
let id = rows[0].id;
const createFile = storage.bucket(bucketName).file(`${id}.csv`);
const csv = parse(rows, { fields });
const dataStream = new stream.PassThrough();
dataStream.push(csv);
dataStream.push(null);
await new Promise((resolve, reject) => {
console.log(`Writing ${id} to GCS`);
dataStream
.pipe(
createFile.createWriteStream({
resumable: false,
validation: false,
metadata: { "Cache-Control": "public, max-age=31536000" },
})
)
.on("error", (error) => {
console.error("Stream failed", error);
reject(error);
})
.on("finish", () => {
resolve(true);
});
});
}
}
res.status(200).send();
} catch (err) {
res.send(err);
}
})();
};
Your function is not async. The host has no idea that you are still doing something inside your function, so it returns without any error.
Change your arrow function to async and drop the IIFE (or at least await it; that part is also important!):
exports.run_checks = async (req, res) => {
"use strict";
let parsedBody = req.body;
let startCount = parsedBody.start;
let endCount = parsedBody.end;
try {
for (let i = startCount; i < endCount; i += 1) {
//Exclude overly large files here
if (i != 100) {
const query =
`SELECT *
FROM \`bq_dataset\`
WHERE id_number = ${i}`;
const options = {
query: query,
location: "europe-west2",
};
const [job] = await bigquery.createQueryJob(options);
console.log(`Job ${job.id} started.`);
const [rows] = await job.getQueryResults();
let id = rows[0].id;
const createFile = storage.bucket(bucketName).file(`${id}.csv`);
const csv = parse(rows, { fields });
const dataStream = new stream.PassThrough();
dataStream.push(csv);
dataStream.push(null);
await new Promise((resolve, reject) => {
console.log(`Writing ${id} to GCS`);
dataStream
.pipe(
createFile.createWriteStream({
resumable: false,
validation: false,
metadata: { "Cache-Control": "public, max-age=31536000" },
})
)
.on("error", (error) => {
console.error("Stream failed", error);
reject(error);
})
.on("finish", () => {
resolve(true);
});
});
}
}
res.status(200).send();
} catch (err) {
res.send(err);
}
};
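If you prefer to keep the IIFE, the other option mentioned above is to await it, so the handler stays pending until the work finishes. A minimal sketch (the loop body is elided; it would be the same BigQuery/CSV code as above):
exports.run_checks = async (req, res) => {
  "use strict";
  // Awaiting the IIFE keeps the async handler alive until the export loop is done
  await (async function () {
    try {
      // ... same BigQuery query / CSV export loop as above ...
      res.status(200).send();
    } catch (err) {
      res.send(err);
    }
  })();
};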
I'm using transactions in a function with Node.js and the mysql npm package. Currently the transaction is not working: if the insert fails, the delete query that ran before it is still committed and I lose the row. I assume the reason is that MySQL comes with autocommit enabled by default. Is there any way to turn off autocommit programmatically from JavaScript?
const mysql = require('mysql')
const config = require('../config')[process.env.NODE_ENV || 'development']
const dbConfig = {
connectionLimit: 10,
host: config.database.host,
user: config.database.user,
password: config.database.password,
database: config.database.database,
}
const pool = mysql.createPool(dbConfig);
const connection = () => {
return new Promise((resolve, reject) => {
pool.getConnection((err, connection) => {
if (err) {
reject(err);
}
const query = (sql, binding) => {
return new Promise((resolve, reject) => {
connection.query(sql, binding, (err, result) => {
if (err) {
reject(err);
}
resolve(result);
});
});
};
const release = () => {
return new Promise((resolve, reject) => {
if (err) {
reject(err);
}
resolve(connection.release());
});
};
resolve({ query, release });
});
});
};
const query = (sql, binding) => {
return new Promise((resolve, reject) => {
pool.query(sql, binding, (err, result, fields) => {
if (err) {
reject(err);
}
resolve(result);
});
});
};
module.exports = { pool, connection, query };
and this is my function:
const connection = await db.connection();
try {
await connection.query("START TRANSACTION");
await db.query('delete from X where id=?', [bar.id]);
await db.query('INSERT INTO X (...) values (...)');
// ...
await connection.query("COMMIT");
} catch(e) {}
You're querying from your pool, not from the connection/client where you are starting the transaction and committing.
Change db.query to connection.query:
const connection = await db.connection();
try {
await connection.query("START TRANSACTION");
await connection.query('delete from X where id=?', [bar.id]);
await connection.query('INSERT INTO X (...) values (...)');
await connection.query("COMMIT");
I can't seem to figure out how to save the result of the someQuery promise. Essentially I would like to take the value in res, pipe it into the parseQuery function, and return the final result. How do I make the parsed result accessible to an API's response?
const neo4j = require('neo4j-driver')
var parser = require('parse-neo4j')
const astria_queries = require('./astriaQueries')
const uri = 'bolt://astria_graph:7687'
const user = 'xxx'
const password = 'xxx'
const someQuery = (query) => {
// run statement in a transaction
const driver = neo4j.driver(uri, neo4j.auth.basic(user, password))
const session = driver.session({ defaultAccessMode: neo4j.session.READ })
const tx = session.beginTransaction()
tx.run(query)
.then((res) => {
// Everything is OK, the transaction will be committed
parseQuery(res)
})
.then(() => {
// Everything is OK, the transaction will be committed
})
.catch((e) => {
// The transaction will be rolled back, now handle the error.
console.log(e)
})
.finally(() => {
session.close()
driver.close()
})
}
const parseQuery = (result) => {
try {
const test = parser.parse(result)
console.log(test)
} catch (err) {
console.log(err)
}
}
module.exports = {
someQuery,
}
It finally clicked with me. Here is the solution I came up with; hopefully it will help others. If there is a better way, please let me know. Thank you @fbiville for your help.
async actions
const neo4j = require('neo4j-driver')
var parser = require('parse-neo4j')
const astria_queries = require('./astriaQueries')
const uri = 'bolt://astria_graph:7687'
const user = 'neo4j'
const password = 'neo'
async function getRecords(query) {
// run statement in a transaction
const driver = neo4j.driver(uri, neo4j.auth.basic(user, password))
const session = driver.session({ defaultAccessMode: neo4j.session.READ })
const tx = session.beginTransaction()
try {
const records = await tx.run(query)
const parseRecords = await parseQuery(records)
return parseRecords
} catch (error) {
console.log(error)
} finally {
session.close()
driver.close()
}
}
async function parseQuery(result) {
try {
const parsedRes = await parser.parse(result)
// console.log(parsedRes)
return parsedRes
} catch (err) {
console.log(err)
}
}
// getRecords(astria_queries.get_data_sources)
module.exports = {
getRecords,
}
api send()
exports.get_data_sources = async (req, res) => {
try {
// await here so the surrounding try/catch actually catches rejections
const response = await queryFuns.getRecords(astria_queries.get_data_sources)
res.send(response)
} catch (error) {
res.status(500).send(error)
console.log(error)
}
}
I am still very new to node.js. In my current test project I want to send a confirmation email or other emails, depending on the loaded template. The template is stored in MySQL.
The result I am getting is:
{
"message": {
"error": {},
"foo": "bar"
}
}
So the error bit is empty and I don't know why...
If I reject manually at a different point in the code it works just fine, so the problem is not with the middleware, router or server.js file.
Also, I have rejected "Foo: Bar" back to check which catch block caught the error.
Here is my mailer.js file:
const nodemailer = require('nodemailer');
let conDB;
module.exports = (injectedMySql) => {
conDB = injectedMySql
return {
sendMail: sendMail
}
}
const sendMail = (mail) => {
return new Promise((resolve,reject) => {
loadTemplate(mail.templateId, mail.languageId)
.then(data => {
const mailserver = {
host: "something.com",
port: 465,
secure: true, // use TLS
auth: {
user: "something#something.com",
pass: "PASSWORD"
},
tls: {
// do not fail on invalid certs
rejectUnauthorized: false
}
};
const body = {
from: 'something@something.com',
to: mail.toAdress,
subject: allReplace(data.subject, mail.subjectReplace),
text: allReplace(data.body, mail.textReplace),
html: allReplace(data.html, mail.htmlReplace)
}
// create a nodemailer transporter using smtp
let transporter = nodemailer.createTransport(mailserver)
transporter.sendMail(body)
.then(data => {console.log(data)
resolve(data)
})
.catch(err => {reject("sendMail problem")})
})
.catch(error => {reject({"error": error, "foo": "bar"})})
})
}
function allReplace (str, obj) {
var retStr = str;
for (var x in obj) {
retStr = retStr.replace(new RegExp(x, 'g'), obj[x]);
}
return retStr;
};
const loadTemplate = (mailTemplate, languageId) => {
return new Promise((resolve,reject) => {
if(mailTemplate === null || languageId === null)
reject("nop, something is missing");
else
{
if (typeof conDB.query === "function")
{
conDB.query('SELECT * FROM email_template WHERE language_id = ? AND template_id = ?', [mailTemplate,languageId])
.then(data => {resolve(data)})
.catch(err => {reject("mysql has a problem")})
}
else
{
reject("function is not available");
}
}
})
}
Here is my mysql.js file:
var mysql = require('mysql2/promise');
const databaseConfigs = {
host: 'localhost',
user: 'USERNAME',
password: 'PASSWORD',
database: 'DBNAME'
};
const createID = table => {
return new Promise((resolve,reject) => {
//execute the query to register the user
let query = '';
let id = Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15)
query = `SELECT * FROM ${table} WHERE id = ?`
this.query(query,[table,id])
.then(data => {
console.log(data[0].length)
if(data[0].length==0)
{
resolve(id)
}
else
{
createID(table)
.then(data => {resolve(data)})
.catch(error => {reject(error)})
}
})
.catch(error => {reject(error)})
})
}
async function query (sql,att) {
let connection = await mysql.createConnection(databaseConfigs);
return new Promise( ( resolve, reject ) => {
console.log(`Query: '${sql}'`);
connection.query(sql,att)
.then(data => {resolve(data)})
.catch(error => {reject(error)})
connection.end();
});
}
async function transaction(queries, queryValues) {
if (queries.length !== queryValues.length) {
return Promise.reject(
'Number of provided queries did not match the number of provided query values arrays'
)
}
const connection = await mysql.createConnection(databaseConfigs)
try {
await connection.beginTransaction()
const queryPromises = []
queries.forEach((query, index) => {
queryPromises.push(connection.query(query, queryValues[index]))
})
const results = await Promise.all(queryPromises)
await connection.commit()
await connection.end()
return results
} catch (err) {
await connection.rollback()
await connection.end()
return Promise.reject(err)
}
}
module.exports.transaction = transaction;
module.exports.query = query;
module.exports.createID = createID;
Thanks to you all!
Chris
I cleaned up your code a bit, especially the error handling, since you keep masking the real errors with your Promise.reject("message") calls.
I think what confused you is that you're already using libraries that work with promises (you don't need to wrap those in promises again). That's actually good, because you can just use async/await.
I hope it helps. If something is unclear just ask.
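To illustrate the point before the full cleanup, here is a minimal sketch (someLib is a hypothetical stand-in for any library call that already returns a promise; it is not part of your code):
// Hypothetical stand-in for any library call that already returns a promise
const someLib = { call: () => Promise.resolve({ ok: true }) };

// Unnecessary: wrapping an already-promise-returning call in a new Promise,
// and masking the real error with a hand-written message
const loadWrapped = () =>
  new Promise((resolve, reject) => {
    someLib.call()
      .then((data) => resolve(data))
      .catch(() => reject("something went wrong")); // the original error is gone
  });

// Simpler: just await the promise the library already gives you
const load = async () => await someLib.call();

load().then(console.log); // { ok: true }
With that in mind, here is the cleaned-up mailer.js: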
const nodemailer = require('nodemailer');
let conDB;
module.exports = (injectedMySql) => {
conDB = injectedMySql
return {
sendMail: sendMail
}
}
// your loadTemplate function already uses promises, no need to wrap it
const sendMail = async mail => {
const data = await loadTemplate(mail.templateId, mail.languageId)
const mailserver = {
host: "something.com",
port: 465,
secure: true, // use TLS
auth: {
user: "something#something.com",
pass: "PASSWORD"
},
tls: {
// do not fail on invalid certs
rejectUnauthorized: false
}
};
const body = {
from: 'something@something.com',
to: mail.toAdress,
subject: allReplace(data.subject, mail.subjectReplace),
text: allReplace(data.body, mail.textReplace),
html: allReplace(data.html, mail.htmlReplace)
}
// create a nodemailer transporter using smtp
let transporter = nodemailer.createTransport(mailserver)
try {
// Return the value of sendmail
return await transporter.sendMail(body);
} catch (err) {
// handle the error or throw it. I'll throw it here, since you rejected the Promise at this point.
// This will actually help you, as you can now see the real error instead of your rejected "foo bar" error object
throw err;
}
}
function allReplace(str, obj) {
var retStr = str;
for (var x in obj) {
retStr = retStr.replace(new RegExp(x, 'g'), obj[x]);
}
return retStr;
};
const loadTemplate = async (mailTemplate, languageId) => {
if (mailTemplate === null || languageId === null)
throw new Error("nop, something is missing");
else {
if (typeof conDB.query === "function") {
try {
// return the template data; without this return, the data in sendMail would be undefined
return await conDB.query('SELECT * FROM email_template WHERE language_id = ? AND template_id = ?', [mailTemplate, languageId]);
} catch (err) {
// it's better to surface the real error; you always hide the real reason something went wrong with your promise reject :).
throw err;
}
}
else {
throw new Error("function is not available");
}
}
}
And here is the cleaned-up mysql.js file:
var mysql = require('mysql2/promise');
const databaseConfigs = {
host: 'localhost',
user: 'USERNAME',
password: 'PASSWORD',
database: 'DBNAME'
};
const createID = async table => {
// use GUID? https://www.npmjs.com/package/guid
let id = Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15)
let sql = `SELECT * FROM ${table} WHERE id = ?`
let data;
try {
// call the query function below directly; `this` is not bound inside an arrow function,
// and the single ? placeholder only needs the id binding
data = await query(sql, [id]);
} catch (error) {
// as we throw the error in query we have to catch it here
// handle it or throw it (I throw it because I can't handle it ;).)
throw error;
}
console.log(data[0].length)
if (data[0].length == 0) {
return id;
} else {
return await createID(table);
}
}
const query = async (sql, att) => {
let connection = await mysql.createConnection(databaseConfigs);
console.log(`Query: '${sql}'`);
try {
const data = await connection.query(sql, att);
return data;
} catch (error) {
// Handle error or throw it again
// you rejected the promise so i throw it here
throw error;
} finally {
connection.end();
}
}
// I changed it to make it the same as the other functions from this
// async function transaction(queries, queryValues) { to
const transaction = async (queries, queryValues) => {
if (queries.length !== queryValues.length) {
// just throw an error
throw new Error('Number of provided queries did not match the number of provided query values arrays');
}
const connection = await mysql.createConnection(databaseConfigs)
try {
await connection.beginTransaction()
const queryPromises = []
queries.forEach((query, index) => {
queryPromises.push(connection.query(query, queryValues[index]))
})
const results = await Promise.all(queryPromises)
await connection.commit()
await connection.end()
return results
} catch (err) {
await connection.rollback()
await connection.end()
// this is not needed
// return Promise.reject(err)
// if you don't want to handle it here just throw the error
throw err;
}
}
module.exports.transaction = transaction;
module.exports.query = query;
module.exports.createID = createID;
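For reference, a minimal sketch of how the cleaned-up sendMail could be called from a route. The route path, module paths, template/language ids, and replacement maps are assumptions for illustration; the mail object shape follows the fields used above (including the toAdress spelling from the original code):
const express = require('express');
const db = require('./mysql');          // hypothetical path to the mysql.js file above
const mailer = require('./mailer')(db); // inject the MySQL module as mailer.js expects

const router = express.Router();

router.post('/signup', async (req, res) => {
  try {
    const info = await mailer.sendMail({
      templateId: 1,            // hypothetical template and language ids
      languageId: 1,
      toAdress: req.body.email, // property name kept as in the original code
      subjectReplace: { '%NAME%': req.body.name },
      textReplace: { '%NAME%': req.body.name },
      htmlReplace: { '%NAME%': req.body.name }
    });
    res.json({ message: 'Confirmation mail sent', id: info.messageId });
  } catch (err) {
    console.log(err);
    res.status(500).json({ message: err.message });
  }
});

module.exports = router;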