I'm currently trying to fill a Cassandra table with the content of an xlsx file, but I'm facing a problem.
I managed to build an array with all the queries necessary to fill my table ("insert into my_table (name, ...) values (value, ...);"), so the array ends up holding roughly 7000 strings.
I then create an outer promise, and inside it I loop to fill an array of promises so that the outer promise settles once all of them are finished.
This is the code I made:
index.js
const ImportFileContent = require("./scripts/import_file_content")
const InsertDb = require("./scripts/insertDb")
const cassandra = require('cassandra-driver');
const databaseConfig = require('./config/database.json');
const authProvider = new cassandra.auth.PlainTextAuthProvider(databaseConfig.cassandra.username, databaseConfig.cassandra.password);
const db = new cassandra.Client({
contactPoints: databaseConfig.cassandra.contactPoints,
authProvider: authProvider
});
// ImportFileContent.importFileContent returns an array of strings; those strings contain all 7000+ queries
ImportFileContent.importFileContent().then(queries => {
    InsertDb.ClientsLeasing(db, queries).then(result => {
        console.log(result);
        db.shutdown(function (err, result) {
        });
    });
});
insertDb.js
let DB = null;
module.exports = {
ClientsLeasing: function (db, queries) {
DB = db;
return insertClientsLeasing(queries);
}
}
function insertClientsLeasing(queries) {
return new Promise((resolve, reject) => {
let nbError = 0;
let nbSuccess = 0;
let promisesArray = [];
// I capped the loop at 2000 because my Cassandra setup can't handle more than 2048 requests in parallel
for (let i = 0; i < 2000; i++) {
promisesArray.push(new Promise(function (resolve, reject) {
DB.execute(queries[i], function (err, result) {
if (err) {
nbError++;
reject(err)
} else {
nbSuccess++;
resolve();
}
});
}));
}
Promise.all(promisesArray).then((result) => {
console.log("is over")
console.log("over ===================== success => ", nbSuccess, " errors => ", nbError);
resolve("success");
}).catch((error) => {
console.log(error);
console.log("is over error")
console.log("over ===================== success => ", nbSuccess, " errors => ", nbError);
resolve("error");
});
});
}
My table has a composite primary key made of the creation date (which is now()) and the client id, and the same client id may appear in multiple rows of the xlsx (could that be the source of the problem?).
So now when I launch this code my output is:
Then when I do a count on the table via cqlsh I only get 1962 rows, with this output:
I feel like I'm missing something in the way I use my promises. I don't really get it.
Thanks
I am writing a small ElectronJS application to interact with a database using the NodeJS implementation of SQLite3.
I am able to store data in the database, no problems.
//renderer.js
document.querySelector('#new-store-add').addEventListener('click', _ => {
console.log("Adding new Store");
let storeName = document.querySelector("#new-store-name-input").value;
ipcRenderer.send('db-add-new-store', storeName);
});
//main.js
ipcMain.on('db-add-new-store', (e, r) => {
addStore(r);
});
//storemgr.js
exports.addStore = (newStore) => {
const sql = `INSERT INTO STORE (store_name) VALUES('${newStore}')`;
db.exec(sql);
}
Problems arise when I try to read and display data from the same table.
I have tried a couple of different approaches, including the current async/Promise approach:
//renderer.js
document.querySelector('#main-get-stores').addEventListener('click', () => {
ipcRenderer.send('db-get-all-stores');
});
ipcRenderer.on('db-got-all-stores', (e,r) => {
console.log('r -> ', r);
console.log('e -> ', e);
console.log('e.stores -> ', e.stores);
})
//main.js
ipcMain.on('db-get-all-stores', async (e,r) => { //NOTE async here
console.log("Get All Stores");
let stores = await getAllStores();
console.log('stores -> ', stores);
e.sender.send('db-got-all-stores', stores);
});
//storemgr.js
exports.getAllStores = () => {
return new Promise((resolve, reject) => { //NOTE returns a Promise
const sql = "select * from STORE";
let res = [];
db.each(sql, (err, row) => {
console.log('row -> ', row); //NOTE actually correctly logs the data to the console
res.push(row);
});
console.log('res -> ', res); //NOTE this is still an empty array
resolve(res);
});
}
The stores variable in main.js is always undefined, and so is res in storemgr.js.
My question is: what is the best approach to handling the database connection when I want to display the data as soon as I have read it?
Forget about actually displaying the data for now; I am stumped as to why the returned object is always undefined after I have already read it from the database.
I also tried different db functions such as .all, with much the same result.
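For what it's worth, here is a minimal sketch of one way the promise could resolve only after db.each has delivered every row, using its optional completion callback (everything else is unchanged from the snippet above):
//storemgr.js (sketch)
exports.getAllStores = () => {
    return new Promise((resolve, reject) => {
        const sql = "select * from STORE";
        let res = [];
        // db.each accepts an optional completion callback that fires after
        // the last row has been delivered; resolve there instead of right away
        db.each(sql, (err, row) => {
            if (err) return reject(err);
            res.push(row);
        }, (err, count) => {
            if (err) return reject(err);
            resolve(res);
        });
    });
}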
I am just starting to work with JS and SQLite and could not find any help for my specific question.
I want to keep the returned value so I can use it as a foreign key in a second table.
This is my function:
async function getIdbyName(table, row, name) {
let nameNeu = '"' + name + '"';
let sql =
"SELECT id as print FROM " +
table +
" WHERE " +
row +
" = " +
nameNeu +
" LIMIT 1;";
// await db.get(sql, (err, row) => {
// console.log(row.print);
// return row;
// });
return await db.get(sql);
}
getIdbyName("...", "...", "...")
.then(function (value) {
console.log("Success!", value); // <-- prints: Success! undefined
})
.catch(function (err) {
console.log("Caught an error!", err);
});
console.log(getIdbyName("r_Tag", "r_Tag", "test")); //<-- shows me a Promise
What do I have to do so that the promise does not stay undefined outside of the function?
Rest of the code:
var sqlite3 = require("sqlite3").verbose();
let db = new sqlite3.Database("./assets/db/test.db", (err) => {
if (err) {
return console.error(err.message);
}
console.log("Connected to the SQlite database.");
});
My other function just creates some strings, and I run db.run(...) a few times to add some tables.
To put it more plainly, your getIdbyName function never returns anything. You need to return the value you get back from await db.get(...). Once you do that, calling getIdbyName should give you the response from the database.
You should also know that your code is susceptible to SQL injection, which is a major security vulnerability. Instead of concatenating strings, you should use a prepared statement.
async function getIdbyName(table, row, name) {
    // ...assemble sql as before...
    return await db.get(sql);
}
Update: Promise Wrapper for SQLite - Aug 1, 2020
Based on this blog post, it seems it's not possible to use native async/await directly with sqlite3. However, you can write a wrapper function around db.all that returns a promise, which allows you to use async/await. Note the use of ? in the SQL statement: each placeholder is replaced by the corresponding value in the array passed as the second argument, in order. Placeholders can only stand in for values, not identifiers, so the table name still has to be interpolated into the string itself. For more help with parameterized queries, read the params bullet point in the documentation here.
const sqlite3 = require("sqlite3").verbose();
const db = new sqlite3.Database("./assets/db/test.db", (err) => {
if (err) {
return console.error(err.message);
}
console.log("Connected to the SQlite database.");
});
db.query = function (sql, params = []) {
const that = this;
return new Promise(function (resolve, reject) {
that.all(sql, params, function (error, result) {
if (error) {
reject(error);
} else {
resolve(result);
}
});
});
};
async function getIdByName(table, name) {
    // assemble sql statement; a placeholder can only bind the value,
    // so the table name is interpolated directly (make sure it is trusted)
    const sql = `
        SELECT id
        FROM ${table}
        WHERE name = ?;
    `;
    return await db.query(sql, [name]);
}
// need async to call
(async () => {
const result = await getIdByName('books', 'my_name');
console.log(result);
})();
In Node.js I have a databaseMapper.js file that uses the OJAI Node.js MapR API to extract data. So far I have it working with single documents, but since this is an async API, I have some issues with querying multiple documents.
This is what I have so far:
function queryResultPromise(queryResult) {
//this should handle multiple promises
return new Promise((resolve, reject) => {
queryResult.on("data", resolve);
// ...presumably something here to hook an error event and call `reject`...
});
}
const getAllWithCondition = async (connectionString, tablename, condition) =>{
const connection = await ConnectionManager.getConnection(connectionString);
try {
const newStore = await connection.getStore(tablename);
const queryResult = await newStore.find(condition);
return await queryResultPromise(queryResult);
} finally {
connection.close();
}
}
Here it will only return the first document, because queryResultPromise resolves on the first "data" event; that callback may fire multiple times before queryResult finally ends (at which point it does queryResult.on('end', () => connection.close())).
I tried using something like Promise.all() to resolve all of them, but I'm not sure how to fit the queryResult.on callbacks into that logic.
This will work
const queryResultPromise = (queryResult) => {
return new Promise((resolve, reject) => {
let result = [];
queryResult.on('data', (data) => {
result.push(data)
});
queryResult.on('end', (data) => {
resolve(result);
});
queryResult.on('error', (err) => {
reject(err);
})
});
};
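For example, a small usage sketch built on the getAllWithCondition helper from the question (the connection string, table name, and condition object are placeholders):
// usage sketch: collects every matching document into an array
(async () => {
    try {
        const docs = await getAllWithCondition(connectionString, "my_table", condition);
        console.log(`fetched ${docs.length} documents`);
    } catch (err) {
        console.error("query failed", err);
    }
})();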
I'm trying to use apolloFetch inside a Promise.all in my Node.js microservice, but I keep getting an error that the query is empty. The reason for using apolloFetch is to call another microservice and pass it an array of queries. Can someone give me some direction? My code is as follows:
const uri = "dsc.xxx.yyyy.com/abc/def/graphql";
const apolloFetch = CreateApolloFetch({uri});
const QryAllBooks = {
type: new GraphQLList(BookType),
args: {},
resolve() {
return new Promise((resolve, reject) => {
let sql = singleLineString`
select distinct t.bookid,t.bookname,t.country
from books_tbl t
where t.ship_status = 'Not Shipped'
`;
pool.query(sql, (err, results) => {
if (err) {
reject(err);
}
resolve(results);
const str = JSON.stringify(results);
const json = JSON.parse(str);
const promises = [];
for (let p = 0; p < results.length; p++) {
const book_id = json[p].bookid;
const query = `mutation updateShipping
{updateShipping
(id: ${book_id}, input:{
status: "Shipped"
})
{ bookid
bookname }}`;
promises.push(query);
}
//Below is the Promise.all function with the
//apolloFetch that calls another graphql endpoint
//an array of queries
Promise.all(promises.map(p => apolloFetch({p}))).then((result) => {
//this is the problem code^^^^^^^^^^^^^^^^^^^^^
resolve();
console.log("success!");
}).catch((e) => {
FunctionLogError(29, "Error", e);
});
});
});
}
};
module.exports = {
QryAllBooks,
BookType
};
It looks like apolloFetch requires a query property, but you are passing p.
Change
Promise.all( promises.map(p=>apolloFetch({p})) )
to
Promise.all( promises.map(query=>apolloFetch({query})) )
You also call resolve twice.
To resolve with all of the errors and successes:
const final_results = []
Promise.all(promises.map(query => apolloFetch({
query,
}))).then((result) => {
final_results.push(result)
}).catch((e) => {
final_results.push(e)
}).then(() => {
resolve(final_results)
});
You immediately resolve or reject as soon as the pool.query() callback runs:
if (err) { reject(err); } resolve(results);
So unless the query fails, you never resolve with the results of the apolloFetch calls, because the promise has already been resolved with the pool.query() results. I guess you're missing an else block:
if( err ) {
reject();
}
else {
const promises = ...
}
PS: you can try using Node.js' util.promisify() to turn pool.query() into a promise as well, so you can write something resembling query(...).then(results => results.map(apolloFetch)) instead of having to mix callbacks and promises.
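A rough sketch of that suggestion; buildMutation is a hypothetical helper that assembles the updateShipping mutation string for one row:
// sketch: promisify pool.query, assuming the usual (err, results) callback signature
const util = require('util');
const query = util.promisify(pool.query).bind(pool);

query(sql)
    .then(results => Promise.all(
        // buildMutation(bookid) would return the mutation string shown above
        results.map(r => apolloFetch({ query: buildMutation(r.bookid) }))
    ))
    .then(() => console.log("success!"))
    .catch(e => FunctionLogError(29, "Error", e));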
I am currently building a Node.js, Express, Sequelize (with PostgreSQL) app and have run into a few problems with using promises together with transactions and loops.
I am trying to figure out how to use a for loop inside a transaction: I want to loop through a list of members and create a new user in the database for each of them.
I know the following code is wrong, but it shows what I am trying to do.
Can anyone point me in the right direction?
var members = req.body.members;
models.sequelize.transaction(function (t) {
for (var i = 0; i < members.length; i++) {
return models.User.create({'firstname':members[i], 'email':members[i], 'pending':true}, {transaction: t}).then(function(user) {
return user.addInvitations([group], {transaction: t}).then(function(){}).catch(function(err){return next(err);});
})
};
}).then(function (result) {
console.log("YAY");
}).catch(function (err) {
console.log("NO!!!");
return next(err);
});
You should use Promise.all:
var members = req.body.members;
models.sequelize.transaction(function (t) {
var promises = []
for (var i = 0; i < members.length; i++) {
var newPromise = models.User.create({'firstname':members[i], 'email':members[i], 'pending':true}, {transaction: t});
promises.push(newPromise);
};
return Promise.all(promises).then(function(users) {
var userPromises = [];
for (var i = 0; i < users.length; i++) {
userPromises.push(users[i].addInvitations([group], {transaction: t}));
}
return Promise.all(userPromises);
});
}).then(function (result) {
console.log("YAY");
}).catch(function (err) {
console.log("NO!!!");
return next(err);
});
I don't believe you need to catch inside the sequelize transaction, as errors should fall through to the catch on the transaction itself.
Promise.all waits for all of the promises to resolve (or for one to fail) before running the .then, and the .then callback receives an array with the data from each promise.
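A tiny illustration of that behaviour:
// resolves with the results in input order, or rejects on the first failure
Promise.all([
    Promise.resolve(1),
    Promise.resolve(2),
]).then(values => {
    console.log(values); // [1, 2]
});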
You'll need to use the built-in looping constructs of bluebird, which ships with sequelize:
var members = req.body.members;
models.sequelize.transaction(t =>
Promise.map(members, m => // create all users
models.User.create({firstname: m, email: m, 'pending':true}, {transaction: t})
).map(user => // then for each user add the invitation
user.addInvitations([group], {transaction: t}) // add invitations
)).nodeify(next); // convert to node err-back syntax for express
Depending on your implementation of Node.js this may help; I have the same setup using Express, Postgres and Sequelize.
Personally I'd prefer the async/await implementation over then/catch, as it is easier to read. Creating a function that can be called externally also improves re-usability.
async function createMembers(req) {
    let members = req.body.members;
    let users = [];
    for (var i = 0; i < members.length; i++) {
        // Must be created inside the loop, but outside the try, so each member gets its own transaction.
        let transaction = await models.sequelize.transaction();
        try {
            // Start transaction block.
            let user = await models.User.create({'firstname': members[i], 'email': members[i], 'pending': true}, {transaction});
            await user.addInvitations([group], {transaction});
            // If successful, commit the record; otherwise the catch block rolls it back.
            await transaction.commit();
            // End transaction block.
            users.push(user);
        } catch (error) {
            console.log("An unexpected error occurred creating user record: ", error);
            await transaction.rollback();
            // Throw the error back to the caller and handle it there, i.e. the calling express route.
            throw error;
        }
    }
    return users;
}
If someone is looking for a solution with TypeScript v4.0.5 using async and await, here is what worked for me. You may be able to use it in a JavaScript application too, depending on the version.
const array = ['one', 'two', 'three'];

(async () => {
    // transaction() returns a promise, so it has to be awaited
    const createdTransaction = await sequelize.transaction();

    const promises = array.map(async item => {
        await model.create(
            { name: item },
            { transaction: createdTransaction },
        );
    });

    await Promise.all(promises);
    await createdTransaction.commit();
})();
First, see https://caolan.github.io/async/docs.html.
Then, simply:
// requiring...
const async = require('async');

// exports...
createAllAsync: (array, transaction) => {
    return new Promise((resolve, reject) => {
        var results = [];
        async.forEachOf(array, (elem, index, callback) => {
            // wait for each create to finish before signalling this iteration as done
            models.Model.create(elem, {transaction})
                .then(created => {
                    results.push(created);
                    callback();
                })
                .catch(callback);
        }, err => {
            if (err) {
                reject(err);
            }
            else {
                resolve(results);
            }
        });
    });
}
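A possible way to call it, assuming the helper above is exported on a dbHelpers object (dbHelpers is an assumption; the other names come from the question):
// usage sketch: run the helper inside a managed sequelize transaction
models.sequelize.transaction(t =>
    dbHelpers.createAllAsync(
        req.body.members.map(m => ({ firstname: m, email: m, pending: true })),
        t
    )
).then(created => {
    console.log("created", created.length, "users");
}).catch(next);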