PostgreSQL results printing to console but not displaying in browser - javascript

I'm trying to get some data from a pg database to my API endpoint. I can print the results to the console, but I can't get them to display in the browser with res.send. I'm guessing the problem is with global and local scope, but I haven't been able to figure it out. I'm using ES6 and transpiling with Babel. Here's a snippet.
app.get('/', (request, response) => {
  const { Pool, Client } = require('pg');
  const config = {
    user: '',
    host: '',
    database: '',
    password: '',
    port: ,
  }
  const pool = new Pool(config);
  const client = new Client(config);
  let whole = [];
  client.connect();
  const text = "SELECT * FROM entries where id='1'";
  client.query(text)
    .then(res => {
      console.log(res.rows[0]);
      whole.push(res.rows[0]);
    })
    .catch(e => console.error(e.stack));
  response.send(whole);
  client.end;
});
This logs to the console:
{ id: 1, title: 'First title', body: 'beautiful body' }
However, the browser only displays []
This is what Babel transpiles it to, which is the script I run in Node:
var whole = [];
client.connect();
var text = "SELECT * FROM entries where id='1'";
client.query(text).then(function (res) {
  console.log(res.rows[0]);
  whole.push(res.rows[0]);
}).catch(function (e) {
  return console.error(e.stack);
});
response.send(whole);
client.end;

response.send is called outside of the async promise .then resolver, and it therefore executes before you push the row data into the array. Moving response.send into the promise resolver should fix it. Note also that client.end; only references the function without calling it; it needs to be client.end().
client.query(text).then(res => {
  whole.push(res.rows[0]);
  client.end();
  response.send(whole);
}).catch((e) => {
  console.error(e.stack);
});
Alternatively, you can use async/await, depending on your Babel version and presets/plugins.
const { Client } = require("pg");
const config = {...};
const queryText = "SELECT * FROM entries where id='1'";

app.get("/", async (request, response) => {
  const client = new Client(config);
  await client.connect();
  try {
    const queryResponse = await client.query(queryText);
    // Send response without pushing to array
    response.send(queryResponse.rows[0]);
    client.end();
  } catch (e) {
    console.error(e.stack);
  }
});
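Since the snippet also imports Pool but never uses it, a shared pool created once at module scope is worth considering: it avoids opening a new connection on every request. A minimal sketch, assuming the same config and table:
const { Pool } = require("pg");
const pool = new Pool(config); // created once, reused across requests

app.get("/", async (request, response) => {
  try {
    // pool.query checks out a client, runs the query, and releases the client
    const { rows } = await pool.query("SELECT * FROM entries WHERE id = $1", [1]);
    response.send(rows[0]);
  } catch (e) {
    console.error(e.stack);
    response.status(500).send("query failed");
  }
});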

Related

Close MONGODB connection after data insertion

I want to populate my database with some random data. I have used Faker.js to generate that data. I'm using MongoDB on my localhost, and all the data properly follows the validation rules from the schema. The problem is closing the connection after the data is inserted: I want to close the connection as soon as the data is populated. I'm using async functions to keep track of everything, but something is not going right.
Here is my code, seeds.js, which is the script I'm using to populate the database:
const path = require("path");
require("dotenv").config({ path: path.resolve(__dirname, "../.env") });
var mongoose = require("mongoose");
mongoose.connect(process.env.MONGODB_URI);
require("../models/User");
require("../models/Item");
require("../models/Comment");
var Item = mongoose.model("Item");
var Comment = mongoose.model("Comment");
var User = mongoose.model("User");
const ItemData = require("../data/item.json");
const CommentData = require("../data/comment.json");
const UserData = require("../data/user.json");
async function InsertData() {
  ItemData.forEach(async (item) => {
    item.seller = item.seller.$oid;
    const oldItem = await Item.find({ title: item.title });
    if (!oldItem.length) {
      var newItem = new Item(item);
      await newItem.save();
    } else {
      console.log(item.slug);
    }
  });
  UserData.forEach(async (user) => {
    const oldUser = await User.find({ username: user.username });
    if (!oldUser.length) {
      var user = new User(user);
      await user.save();
    } else {
      console.log(user.username);
    }
  });
  CommentData.forEach(async (comment) => {
    comment.item = comment.item.$oid;
    comment.seller = comment.seller.$oid;
    var newComment = new Comment(comment);
    const oldComment = await Comment.find({ _id: newComment.id });
    if (!oldComment.length) {
      await newComment.save();
    } else {
      console.log(comment.body);
    }
  });
}

async function cleanup() {
  await Item.deleteMany({}, () => console.log("Data Cleared Item"));
  await Comment.deleteMany({}, () => console.log("Data Cleared Comment"));
  await User.deleteMany({}, () => console.log("Data Cleared User"));
}

async function main() {
  InsertData().then(async () => {
    console.debug('Data Inserted. Closing connection.');
    await mongoose.connection.close();
  });
}
main();
Here is the stack trace of the error:
/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/connection/pool.js:841
cb(new MongoError('pool destroyed'));
^
MongoError: pool destroyed
at Pool.write (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/connection/pool.js:841:8)
at _command (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/wireprotocol/command.js:120:10)
at command (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/wireprotocol/command.js:28:5)
at Object.query (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/wireprotocol/query.js:66:3)
at Server.query (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/topologies/server.js:644:16)
at FindOperation.execute (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/find.js:38:12)
at /Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/execute_operation.js:144:17
at Server.selectServer (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/topologies/server.js:832:3)
at Server.selectServer (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/topologies/topology_base.js:342:32)
at executeWithServerSelection (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/execute_operation.js:131:12)
at /Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/execute_operation.js:70:9
at maybePromise (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/utils.js:685:3)
at executeOperation (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/operations/execute_operation.js:34:10)
at Cursor._initializeCursor (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/cursor.js:534:7)
at Cursor._initializeCursor (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/cursor.js:186:11)
at nextFunction (/Users/karnikkanojia/Desktop/Anythink-Market-21cto/backend/node_modules/mongodb/lib/core/cursor.js:737:10)
error Command failed with exit code 1.
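For what it's worth, the usual cause of this error pattern is that Array.prototype.forEach does not await async callbacks: InsertData() resolves as soon as the three forEach loops have started their iterations, so mongoose.connection.close() runs while the find/save calls are still in flight, and they then hit a destroyed pool. A sketch of one way to serialize the inserts with for...of, assuming the same models and data files:
async function InsertData() {
  // for...of awaits each iteration, unlike forEach
  for (const item of ItemData) {
    item.seller = item.seller.$oid;
    const oldItem = await Item.find({ title: item.title });
    if (!oldItem.length) {
      await new Item(item).save();
    } else {
      console.log(item.slug);
    }
  }
  // ...same pattern for UserData and CommentData...
}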

Reusing SQL Server database connections with Azure Functions using JavaScript

I cannot find clear information on how to manage SQL Server database connections from an Azure Function written in JavaScript.
I am using connection pool code:
const pool = new sql.ConnectionPool(config);
const poolConnect = pool.connect();

pool.on('error', err => {
  // ... error handler
})
and I am using the poolConnect object from the function which executes the query:
export const selectQuery = async function() {
  const connectionPool = await mssqlDBPoolConnect;
  const request = connectionPool.request();
  await request.query('select query');
}
So how can I use the same connection pool across all Azure functions?
Create two folders named config and toolkit under your root path. Put your db.js in the config folder, and in the toolkit folder create a SQL helper, sqltools.js, that exports the query functions.
You can then use the same connection pool by calling sqltools from each function's code. Because Node caches required modules, every function running in the same host process shares the one pool, and you avoid repeating the connection code in every function.
Try the db.js code below:
const sql = require('mssql')

const config = {
  user: 'yourusername',
  password: 'yourpassword',
  server: 'yoursqlserver.database.windows.net', // You can use 'localhost\\instance' to connect to named instance. Do not use TCP.
  database: 'yourdb',
  "options": {
    "encrypt": true,
    "enableArithAbort": true
  }
}

const poolPromise = new sql.ConnectionPool(config)
  .connect()
  .then(pool => {
    console.log('Connected to MSSQL')
    return pool
  })
  .catch(err => console.log('Database Connection Failed! Bad Config: ', err))

module.exports = {
  sql, poolPromise
}
The sqltools.js helper:
const { poolPromise } = require('../config/db')

module.exports.sqltools = {
  ExecSqlQuery: async function (arg) {
    const pool = await poolPromise
    // e.g. SELECT * FROM SYSOBJECTS WHERE xtype = 'U'
    var result = null;
    try {
      result = await pool.request()
        .query(arg)
    } catch (error) {
      console.log(error.message);
    }
    return result;
  },
  ExecProce: function (arg2, arg3, arg4) {
    console.log(arg2, arg3, arg4);
  }
}
Here is my HttpTrigger1 index.js code, which calls ExecSqlQuery to execute SQL strings:
const { sqltools } = require('../toolkit/sqltools');

module.exports = async function (context, req) {
  context.log('JavaScript HTTP trigger function processed a request.');
  var result = null;
  try {
    // call ExecSqlQuery func
    result = await sqltools.ExecSqlQuery('SELECT * FROM SYSOBJECTS WHERE xtype = \'U\'');
  } catch (error) {
    console.log(error.message);
  }
  const responseMessage = "Func 1 Result : TableName= " + result.recordset[0].name;
  context.res = {
    // status: 200, /* Defaults to 200 */
    body: responseMessage
  };
}
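To see the reuse, a hypothetical second function, say HttpTrigger2, can require the same helper; both triggers then await the same poolPromise rather than opening their own pools. A sketch:
// HttpTrigger2/index.js (hypothetical second function, same pattern)
const { sqltools } = require('../toolkit/sqltools');

module.exports = async function (context, req) {
  // Same cached pool as HttpTrigger1: '../config/db' is only evaluated once per process
  const result = await sqltools.ExecSqlQuery('SELECT COUNT(*) AS n FROM SYSOBJECTS');
  context.res = {
    body: "Func 2 Result : Count= " + (result ? result.recordset[0].n : 'query failed')
  };
}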

DynamoDB update does not console.log any output

I have the following code. It is supposed to receive an SQS message, read the body, then update a DynamoDB record with the information contained in that body. The update is not working, which is one issue, but stranger still, I'm not getting any output from the DynamoDB update at all. The last line of output is the console.log that prints the SQS message, and then the function ends.
How is this possible? Shouldn't DynamoDB return some kind of output?
console.log('Loading function');
const util = require('util')
const AWS = require('aws-sdk');
var documentClient = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
  //console.log('Received event:', JSON.stringify(event, null, 2));
  for (const { messageId, body } of event.Records) {
    //const { body } = event.Records[0];
    //console.log(body)
    console.log('SQS message %s: %j', messageId, body);
    const JSONBody = JSON.parse(body)
    //const message = JSON.parse(test["Message"]);
    const id = JSONBody.id;
    const city = JSONBody.City;
    const address = JSONBody.Address;
    const params = {
      TableName: 'myTable',
      Key: {
        ID: ':id',
      },
      UpdateExpression: 'set address = :address',
      ExpressionAttributeValues: {
        ':id': id,
        ':address': address,
        ':sortKey': "null"
      }
      //ReturnValues: "UPDATED_NEW"
    };
    documentClient.update(params, function (err, data) {
      if (err) console.log(err);
      else console.log(data);
    });
  }
  return `Successfully processed ${event.Records.length} messages.`;
};
There are a couple of ways to do this, but I'm not sure about your use case: Are the operations critical? Do failed items need to be handled? Does performance need to scale to large datasets? etc.
// I do not recommend this implementation
const { DynamoDB } = require('aws-sdk');
const documentClient = new DynamoDB.DocumentClient();

exports.handler = async (event) => {
  for (const { messageId, body } of event.Records) {
    console.log('SQS message %s: %j', messageId, body);
    // Parsing JSON is dangerous without knowing the structure; remember to
    // handle the case where an error occurs
    const JSONBody = JSON.parse(body)
    const id = JSONBody.id;
    const address = JSONBody.Address;
    const params = {
      TableName: 'myTable',
      Key: {
        ID: id, // the key needs the actual value, not the ':id' placeholder
      },
      UpdateExpression: 'set address = :address',
      ExpressionAttributeValues: {
        ':address': address // only values referenced in the expression may appear here
      },
      ReturnValues: "UPDATED_NEW"
    };
    // Wait for each update operation to finish; I/O time adds up
    await documentClient.update(params)
      .promise()
      .then(res => {
        console.log(res)
      })
      .catch(err => {
        console.error(err);
      })
  }
  // Even if an update operation failed, this message is still returned by the handler
  return `Successfully processed ${event.Records.length} messages.`;
};
// My recommended way
const AWS = require('aws-sdk');
const documentClient = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
  // All the update operations are fired nearly concurrently, so I/O time is reduced
  return Promise.all(event.Records.map(({ messageId, body }) => {
    console.log('SQS message %s: %j', messageId, body);
    // Parsing JSON is dangerous without knowing the structure; remember to
    // handle the case where an error occurs
    const JSONBody = JSON.parse(body)
    const id = JSONBody.id;
    const address = JSONBody.Address;
    const params = {
      TableName: 'myTable',
      Key: {
        ID: id, // actual key value, as above
      },
      UpdateExpression: 'set address = :address',
      ExpressionAttributeValues: {
        ':address': address
      },
      ReturnValues: "UPDATED_NEW"
    };
    return documentClient.update(params)
      .promise()
      .then(res => {
        console.log(res)
      })
  }))
    // When all the updates have finished, the handler returns a string
    .then(() => {
      return `Successfully processed ${event.Records.length} messages.`
    })
    // If any update operation fails, the remaining results are discarded
    // and the handler returns undefined
    .catch(error => {
      console.error(error);
      // return some error for the lambda response.
    })
};
P.S. My two cents: before you do any kind of Lambda development with the Node.js runtime, you should understand the differences between callbacks, promises, and async/await in JavaScript.
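For illustration, here is the same update call in each of those three styles (assuming the params object from above); only the last two cooperate with an async handler:
// Callback style: the async handler can return before this callback ever fires
documentClient.update(params, (err, data) => {
  if (err) console.error(err);
  else console.log(data);
});

// Promise style: .promise() converts the AWS.Request into a Promise you can chain
documentClient.update(params).promise()
  .then(data => console.log(data))
  .catch(err => console.error(err));

// async/await style: inside an async function, execution pauses until the update settles
const data = await documentClient.update(params).promise();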
Fixed it by making the method synchronous, i.e. removed async from the function definition.

Hyperledger query never returns results

I'm trying to query my business network using buildQuery, but it always returns an empty array.
My code is as follows.
This is the connection.js file:
module.exports = {
  BusinessNetworkConnection: require('composer-client').BusinessNetworkConnection,
  cardName: '',
  connection: {},
  connect: function () {
    var cardType = { type: 'composer-wallet-filesystem' }
    this.connection = new this.BusinessNetworkConnection(cardType);
    return this.connection.connect(this.cardName);
  },
  disconnect: function (callback) {
    this.connection.disconnect();
  }
};
This is my query.js file, which is invoked to get results:
const connection = require('./connection');

const getContacts = async (cardName, companyID) => {
  connection.cardName = cardName;
  try {
    await connection.connect();
    main();
  } catch (error) {
    main(error);
  }

  async function main(error) {
    if (error) { return new Error("Ops Error: ", error) };
    const statement = 'SELECT org.finance.einvoice.participant.Company WHERE (participantId == _$companyID)'
    const query = await connection.connection.buildQuery(statement);
    const company = await connection.connection.query(query, { companyID }).catch(err => { return new Error(err) });
    await connection.connection.disconnect().catch(err => new Error(err));
    console.log(company);
    return company;
  };
};

module.exports = {
  getContacts
};
The expected behavior of getContacts() is to return an asset from the business network, but it actually returns an empty array.
Current versions: composer-cli 0.20, composer-playground 0.20, composer-client 0.20, composer-common 0.20, and fabric-dev-server 1.2.
I found the solution for this issue.
I was using a card which was not allowed to perform queries. When I used the admin card, it returned results.
The other way is to allow participants to issue queries in the permissions.acl file.
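A sketch of the kind of rule, assuming the Company participant namespace from the query above:
rule ParticipantsCanQuery {
    description: "Allow Company participants to read (and therefore query) Company records"
    participant: "org.finance.einvoice.participant.Company"
    operation: READ
    resource: "org.finance.einvoice.participant.Company"
    action: ALLOW
}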

node.js async/await with MySQL

I need to get all the results synchronously and append them to a string, using the async/await keywords like in C#.
I am new to Node.js and I cannot adapt this new syntax to my code.
var string1 = '';
var string2 = '';
var string3 = '';
var string4 = '';

DatabasePool.getConnection(function (err, connection) {
  connection.query(query, function (err, result) {
    if (err) {};
    string1 = result;
  });
  connection.query(query, function (err, result) {
    if (err) {};
    string2 = result;
  });
  connection.query(query, function (err, result) {
    if (err) {};
    string3 = result;
  });
  connection.query(query, function (err, result) {
    if (err) {};
    string4 = result;
  });
  //I need to append all these strings to appended_text but
  //all variables remain blank because the code below runs first.
  var appended_text = string1 + string2 + string3 + string4;
});
If you happen to be on Node 8+, you can leverage the native util.promisify() with the node mysql package.
Do not forget to call bind() so that `this` doesn't get mixed up:
const mysql = require('mysql'); // or use import if you use TS
const util = require('util');
const conn = mysql.createConnection({yourHOST/USER/PW/DB});

// node native promisify
const query = util.promisify(conn.query).bind(conn);

(async () => {
  try {
    const rows = await query('select count(*) as count from file_managed');
    console.log(rows);
  } finally {
    conn.end();
  }
})()
Use the mysql2 package. It has a promise wrapper, so you can do this:
async function example1() {
  const mysql = require('mysql2/promise');
  const conn = await mysql.createConnection({ database: 'test' });
  let [rows, fields] = await conn.execute('select ?+? as sum', [2, 2]);
}
Assuming that the ORM you are using is promise-based, you can do something like this:
async function buildString() {
  try {
    const connection = await DatabasePool.getConnection();
    const string1 = await connection.query(query);
    const string2 = await connection.query(query);
    const string3 = await connection.query(query);
    const string4 = await connection.query(query);
    return string1 + string2 + string3 + string4;
  } catch (err) {
    // do something
  }
}
Any promise can be used with async/await by putting await in front of the call. Note, however, that this function must be used within an async function "wrapper", and you need to handle errors in try/catch blocks.
I also want to point out that these four queries do not run simultaneously; you'll still need Promise.all for that, as sketched below.
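A minimal sketch of the concurrent variant, under the same promise-based-connection assumption:
async function buildStringConcurrently() {
  const connection = await DatabasePool.getConnection();
  // All four queries are issued up front; Promise.all resolves once all have settled
  const [string1, string2, string3, string4] = await Promise.all([
    connection.query(query),
    connection.query(query),
    connection.query(query),
    connection.query(query),
  ]);
  return string1 + string2 + string3 + string4;
}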
If you want to use mysql (also called mysqljs) without a wrapper, you have to do a little bit of work, but it's easy enough. Here is what the connect function would look like:
const mysql = require('mysql')
var my_connection = mysql.createConnection({ ... })

async function connect() {
  try {
    await new Promise((resolve, reject) => {
      my_connection.connect(err => {
        return err ? reject(err) : resolve()
      })
    })
  } catch (err) {
    // ...handle errors...
  }
}

connect()
As you can see, await knows how to handle a promise: you create one and call the resolve/reject functions in the callback implementation. That's all there is to it, really, so using a wrapper may be a bit much unless you access your database a lot.
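A query function under the same hand-rolled pattern (a sketch; sql and params are placeholder arguments):
function query(sql, params) {
  // Wrap the callback-style query in a promise, mirroring connect() above
  return new Promise((resolve, reject) => {
    my_connection.query(sql, params, (err, results) => {
      return err ? reject(err) : resolve(results)
    })
  })
}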
As stated by LeOn - Han Li, I include small modifications, since I had to work with the result.
var mysql = require('mysql');
const util = require('util');

const conn = mysql.createConnection({
  host: '127.0.0.1',
  user: 'user',
  password: 'password',
  database: 'database'
});

const query = util.promisify(conn.query).bind(conn);

let result = async function () {
  var userCourse = [];
  try {
    const rows = await query('select * from file_managed');
    userCourse = rows;
  } finally {
    conn.end();
    return userCourse;
  }
};

result()
  .then(value => {
    console.log(value)
  });
Or use mysql-async-simple
https://www.npmjs.com/package/mysql-async-simple
const { makeDb } = require('mysql-async-simple');
const mysql = require("mysql");

const connection = mysql.createConnection({
  host: process.env.HOST,
  user: process.env.USER,
  password: process.env.PASSWORD,
  database: process.env.DB
});

const db = makeDb();

// await is only valid inside an async function, so wrap the calls
(async () => {
  await db.connect(connection);
  try {
    const users = await db.query(connection, 'SELECT * FROM users');
  } catch (e) {
    // handle exception
  } finally {
    await db.close(connection);
  }
})();
You can use the promise-mysql package like so:
const mysql = require('promise-mysql')

const getDbConnection = async () => {
  return await mysql.createConnection({
    host: process.env.HOST,
    user: process.env.USER,
    password: process.env.PASSWORD,
    database: process.env.DB
  })
}

const getUsers = async () => {
  const db = await getDbConnection()
  const users = await db.query("SELECT * FROM users")
  await db.end()
  return users
}
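Usage is then an ordinary promise call; note that, as written, each getUsers() call opens and closes its own connection, which is fine for occasional queries but argues for a pool under load:
getUsers()
  .then(users => console.log(users))
  .catch(err => console.error(err))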
You would have to make sure that the mysql library you are using either supports Promises, which are required by async/await, or wrap the library with a tool like Bluebird's promisifyAll.
async function appendedText() {
  const connection = await DatabasePool.getConnectionAsync();
  // await on a bare array does nothing useful; Promise.all awaits every query
  const [string1, string2, string3, string4] = await Promise.all([
    connection.query(query1),
    connection.query(query2),
    connection.query(query3),
    connection.query(query4),
  ]);
  return string1 + string2 + string3 + string4;
}
Note that calling appendedText() will actually return a Promise and not a value.
appendedText().then(appended_text => {});
It seems you are using mysqljs, which isn't a promise-based library, so you can't achieve what you want with it directly. What you can do is use a promise-based library like Sequelize, or, as a comment suggests:
use a tool like Bluebird's promisifyAll to wrap the library.
I don't know much about wrapping, so what I did was switch to Sequelize.
Instead of using util or promise-mysql, we can construct the promise inside the mysql connect callback:
var mysql = require('mysql');

var connection = mysql.createConnection({
  host: "localhost",
  user: "root",
  password: "pass",
  database: "test"
});

async function asyncAwait(req, res) {
  connection.connect((err) => {
    var promise1 = new Promise((resolve, reject) => {
      if (err) return reject(err);
      console.log('Mysql: Connected');
      resolve('Connected');
    });
    promise1
      .then(() => {
        // Implement the logic here
      })
      .catch(error => {
        console.log(error)
      });
  })
}

asyncAwait();
