How should I handle a db connection in JavaScript / AWS Lambda?

In my JS Lambda function I have something along the lines of the following...
index.js:

import utils from './utils';

exports.handler = function (event, context) {
  return utils.initDB()
    .then(function () {
      return utils.doSomething();
    });
};

utils.js:

var dbConfig = null;
var knex = null;

function initDB() {
  dbConfig = require('../db');
  knex = require('knex')(dbConfig);
  return;
}
Basically, how should I pass around the knex object? Is it okay to have it as a global var in the utils file? Should I return it to the handler and pass it into every smsUtils.doX call? I'm thinking this might be causing problems with db connection/pooling, but I don't know how to find out.

For anyone who stumbles on this in the future (i.e. me when I'm googling how to do this again in a year):
http://blog.rowanudell.com/database-connections-in-lambda/ explains connection reuse in Lambda. It should look something like this:
const pg = require('pg');

// Created outside the handler, so the connection is reused across warm invocations
const client = new pg.Client('postgres://myrds:5432/dbname');
client.connect();

exports.handler = (event, context, cb) => {
  client.query('SELECT * FROM users', (err, res) => {
    // Do stuff with res.rows (the users)
    cb(null); // Finish the function cleanly
  });
};
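Applied to the original knex question, the same idea would be to create the knex instance once at module scope so the pool survives warm invocations, instead of re-initializing it inside the handler or passing it into every call. A minimal sketch, assuming the connection string is supplied via an environment variable set on the Lambda:

// utils.js -- build knex once, at module load time
const knex = require('knex')({
  client: 'pg',
  connection: process.env.DATABASE_URL, // assumption: configured on the Lambda
  pool: { min: 0, max: 1 } // a Lambda container handles one event at a time
});

module.exports = { knex };

Handlers can then require('./utils') and use the shared instance directly, which sidesteps threading it through every doX call.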

Related

convert nodejs function to AWS Lambda compatible

I am pretty new to both Node.js and AWS Lambda. I have created a very small Node.js function that is working fine locally. Now I need to run it on AWS Lambda, but it looks like there is a handler requirement that I am not understanding completely.
Below is my Node.js function that I need to run on Lambda. Any idea what changes I need to make to execute it on AWS? Thanks
(async function () {
  const DOMAIN = "abc.xyz.com";
  const KEY = "***";
  const mailchimpClient = require("@mailchimp/mailchimp_transactional")(KEY);

  const run = async () => {
    const response = await mailchimpClient.senders.addDomain({
      domain: DOMAIN,
    });
    console.log(response);
  };

  run();
})();
Basically you just need to export a function with a specific name: handler
exports.handler = async function(event, context) {
  console.log("EVENT: \n" + JSON.stringify(event, null, 2));
  return "foo.bar";
}
In this handler, you just need to return something to mark the invocation as a success, or throw an error to mark it as a failure.
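For example, throwing from the async handler is enough to fail the invocation (a minimal sketch; the event.domain check is just an illustrative assumption):

exports.handler = async function(event, context) {
  if (!event.domain) {
    throw new Error("missing domain"); // Lambda records this invocation as failed
  }
  return "success"; // any returned value marks it as succeeded
}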
In your case, this should work:
var DOMAIN = "abc.xyz.com";
var KEY = "***";
const mailchimpClient = require("@mailchimp/mailchimp_transactional")(KEY);

exports.handler = async function(event, context) {
  const response = await mailchimpClient.senders.addDomain({
    domain: DOMAIN,
  });
  console.log(response);
  return "success";
}
Here are more examples and advanced configurations:
https://docs.aws.amazon.com/lambda/latest/dg/lambda-samples.html
https://github.com/awsdocs/aws-lambda-developer-guide/blob/main/sample-apps/blank-nodejs/function/index.js
https://github.com/awsdocs/aws-lambda-developer-guide/blob/main/sample-apps/nodejs-apig/function/index.js
It depends on whether you are using any framework to create your AWS serverless code.
However, your usual code would be something like this:
exports.handler = function(event, context) {
  console.log('Lambda A Received event:', JSON.stringify(event, null, 2));
  context.succeed('Hello ' + event.name); // legacy completion style; newer runtimes prefer the callback or an async return
};
If you want an easier way to work with AWS serverless code such as Lambdas, look at arc.codes
Also, here is a link to the AWS docs: https://docs.aws.amazon.com/lambda/latest/dg/nodejs-handler.html

Unable to export db properties from nodejs module

I am trying to export database properties stored in a properties file from a JavaScript module. By the time I read the database properties file, the JavaScript file has already been exported, and the data properties appear undefined wherever I use them in other modules.
const Pool = require('pg').Pool;
const fs = require('fs');
const path = require('path');

class DbConfig {
  constructor(dbData) {
    this.pool = new Pool({
      user: dbData['user'],
      host: dbData['host'],
      database: dbData['database'],
      password: dbData['password'],
      max: 20,
      port: 5432
    });
  }
}

function getdbconf() {
  const dbData = {};
  fs.readFile("../../db_properties.txt", 'utf8', (err, data) => {
    if (err) {
      console.error(err);
      return;
    }
    // dbData = {"user":"postgres", "password": "1234"...};
    return dbData;
  });
}

let db = new DbConfig(getdbconf());
let dbPool = db.pool;
console.log("dbpool : -> : ", dbPool); // username and password appear undefined
module.exports = { dbPool };
Is there a way to read the data before exporting it from a JavaScript module?
Usually database config or any other sensitive info is read from a .env file using dotenv.
Or you could provide the env vars from the command line itself, like:
DB_HOST=127.0.0.1 node index.js
inside your index.js
console.log(process.env.DB_HOST)
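With the dotenv route, a minimal sketch would look like this (the variable names are just examples):

// .env -- kept out of source control
//   DB_HOST=127.0.0.1
//   DB_PASSWORD=secret

require('dotenv').config(); // copies the entries from .env onto process.env

const { Pool } = require('pg');
const pool = new Pool({
  host: process.env.DB_HOST,
  password: process.env.DB_PASSWORD,
});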
Please create a new file (connection-pool.js) and paste this code:
const { Pool } = require('pg');

const poolConnection = new Pool({
  user: 'postgresUserName',
  host: 'yourHost',
  database: 'someNameDataBase',
  password: 'postgresUserPassword',
  port: 5432,
});

console.log('connectionOptions', poolConnection.options);

module.exports = poolConnection;
To use it, create a new file (demo-connection.js) and paste this code:
const pool = require('./connection-pool');

pool.query('SELECT NOW();', (err, res) => {
  if (err) {
    // throw err;
    console.log('connection error');
    return;
  }
  if (res) {
    console.log(res.rows);
    pool.end();
  }
});
This is an alternative option 🙂
Exporting the result of async calls
To export values which have been obtained asynchronously, export a Promise.
const fs = require('fs/promises'); // `fs/promises` means no callbacks; Promises are returned
const dbDataPromise = fs.readFile('fileToRead'); // `readFile` returns a Promise now
module.exports = dbDataPromise;
Importing
When you need to use the value,
const dbDataPromise = require('./dbdata');

async function init() {
  const dbData = await dbDataPromise;
}

// or without async, using Promise callbacks
function init() {
  dbDataPromise
    .then(dbData => { /* the rest of your code that depends on dbData goes here */ });
}
Current code broken
Please note that your current code, as pasted above, is broken:
function getdbconf() {
  const dbData = {};
  fs.readFile("../../db_properties.txt", 'utf8', (err, data) => {
    //[...] snipped for brevity
    return dbData;
  });
}
fs.readFile "returns" dbData, but there is nothing to return to, since you are inside a callback which you did not call yourself. The function getdbconf returns nothing.
The line that says let db = new DbConfig(getdbconf()); will NOT work. It needs to be inside the callback.
The only way to avoid putting all of your code inside the callback (and to "flatten" it) is to use await, or to use readFileSync.
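With await, that might look like the following sketch (it assumes the properties file contains JSON):

const fs = require('fs/promises');

async function getdbconf() {
  const data = await fs.readFile('../../db_properties.txt', 'utf8');
  return JSON.parse(data); // e.g. {"user": "postgres", "password": "1234", ...}
}

getdbconf().then(dbData => {
  const db = new DbConfig(dbData);
  // everything that depends on the pool runs from here
});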
Avoiding the issue
Using environment variables
Suhas Nama's suggestion is a good one, and is common practice. Try putting the values you need in environment variables.
Using synchronous readFile
While synchronous calls do block the event loop, that is fine during initialization, before your app is up and running.
This avoids the problem of having everything in a callback or having to export Promises, and is often the best solution.
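Applied to the code above, a sketch (again assuming the properties file holds JSON):

const fs = require('fs');

// Blocking here is fine: this runs once, at module load, before the app serves anything.
const dbData = JSON.parse(fs.readFileSync('../../db_properties.txt', 'utf8'));

const db = new DbConfig(dbData);
module.exports = { dbPool: db.pool };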

Decrypt multiple env. variables nodejs - AWS Lambda

I'm having difficulty decrypting multiple environment variables in Node.js for an AWS Lambda. I've looked at the code sample supplied in the console and the following two related questions:
Question 1,
Question 2
I have been able to successfully decrypt a single environment variable with their code sample; however, when I try to apply a cleaner approach using promises (the methods outlined in the questions above), I get this error when testing the Lambda function in the console:
TypeError: First argument must be a string, Buffer, ArrayBuffer,
Array, or array-like object.
I was wondering if anyone has had this issue before and how I could go about resolving it?
Edit:
I've added some samples from my code below
const AWS = require('aws-sdk');
const mysql = require('mysql');

let connection;

const encrypted = {
  username: process.env.username,
  password: process.env.password,
  database: process.env.database,
  host: process.env.host
};

let decrypted = {};
const encryptedEnvVars = [process.env.username, process.env.password, process.env.database, process.env.host];

exports.handler = (event, context, callback) => {
  if (isEnvVarsDecrypted()) {
    processEvent(event, context);
  } else {
    Promise.all(encryptedEnvVars.map(decryptKMS))
      .then(decryptEnvVars)
      .catch(console.log);
  }
};

function decryptKMS(key) {
  return new Promise((resolve, reject) => {
    const kms = new AWS.KMS();
    kms.decrypt({ CiphertextBlob: new Buffer(key, 'base64') }, (err, data) => {
      if (err) { reject(err); }
      else { resolve(data.Plaintext.toString('ascii')); }
    });
  });
}

var decryptEnvVars = data => {
  return new Promise((resolve, reject) => {
    console.log(data);
    decrypted.username = data[0].Plaintext.toString('ascii');
    decrypted.password = data[1].Plaintext.toString('ascii');
    decrypted.database = data[2].Plaintext.toString('ascii');
    decrypted.host = data[3].Plaintext.toString('ascii');
    resolve();
  });
};

var isEnvVarsDecrypted = () => {
  return decrypted.username && decrypted.password && decrypted.database && decrypted.host;
}
If key is null, then new Buffer(key, 'base64') will fail with the error you describe.
When I ran your code myself:
If any environment variable was missing, the error occurred
When all environment variables were declared, the error ceased
So, you should confirm that the environment variables you reference are actually defined.
A couple of other pointers:
Make sure you are always calling the Lambda callback, regardless of success/failure; this is how you signal to the Lambda environment that execution has ended.
After calling decryptEnvVars, you should call your processEvent function.
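Putting those two pointers together, the handler might be wired like this (a sketch; it assumes processEvent accepts the callback, and leaves decryptEnvVars as in your code):

exports.handler = (event, context, callback) => {
  if (isEnvVarsDecrypted()) {
    return processEvent(event, context, callback);
  }
  Promise.all(encryptedEnvVars.map(decryptKMS))
    .then(decryptEnvVars)
    .then(() => processEvent(event, context, callback)) // continue once decryption finishes
    .catch(err => callback(err)); // always end the invocation, even on failure
};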

Export object with module.exports with callback functions

I am trying to learn how to write my own modules for Node.js, specifically to implement various objects with which I could then work throughout the app.
I want to be able to use the result something like this:

// assuming I have database:
// Person_table(ID int A_I, NAME varchar, AGE int)

// code:
var p = new Person("John", 22);
p.writeToDatabase();
// the object is now written in the database
I have tried the following, but for some reason beyond my understanding it doesn't work.
I have declared three files: db.js, person.js, app.js
db.js
var mysql = require('mysql');

var conn = mysql.createPool({
  host: 'localhost',
  database: 'db_name',
  user: 'user_name',
  password: 'pass',
  multipleStatements: true,
  connectionLimit: 10
});

conn.getConnection(function(err) {
  if (err) throw err;
});

module.exports = conn;
person.js
var db = require('./db.js');

function Person(n, a) {
  this.name = n;
  this.age = a;
}

Person.prototype.writeToDatabase = function (callback) {
  db.query("INSERT INTO Person_table(NAME, AGE) VALUES(?,?)", [this.name, this.age], function (err, rows) {
    if (err) return callback(err);
    else return callback(null, rows);
  });
}

module.exports = Person;
app.js
var Person = require('./person.js');

var p = new Person("John", 22);
p.writeToDatabase(function(err, rows) {
  if (err) console.log(err);
  else console.log("written to DB");
});
I would appreciate help figuring out what is wrong with the code.
As a bonus, I would also appreciate any good literature on the subject of module exports and object prototypes for building a layered Node.js app.
The first piece of advice I'll give is the following: you can use your callback like this in your person.js:
Person.prototype.writeToDatabase = function (callback) {
  db.query("INSERT INTO Person_table(NAME, AGE) VALUES(?,?)",
    [this.name, this.age],
    callback // Since you do nothing with `err` or `rows`
  );
}
Why? Because you handle the callback in your app.js file :).
Next, I think it doesn't work because you don't wait for your conn.getConnection function to finish, and moreover, you don't use the connection that this function gives you! Check the documentation here.
You should reorganize your code like:
db.js
var mysql = require('mysql');

module.exports = mysql.createPool({
  host: 'localhost',
  database: 'db_name',
  user: 'user_name',
  password: 'pass',
  multipleStatements: true,
  connectionLimit: 10
});
person.js
module.exports = function (connection) { // You need the connection to query your database.
  function Person(n, a) {
    this.name = n;
    this.age = a;
  }

  Person.prototype.writeToDatabase = function (callback) {
    // The connection is used here.
    connection.query("INSERT INTO Person_table(NAME, AGE) VALUES(?,?)",
      [this.name, this.age],
      callback
    );
  }

  return Person;
};
app.js
var db = require('./db');

db.getConnection(function (err, connection) { // Once you're connected to the database, you can start using your app.
  var Person = require('./person')(connection); // Since we export a function with a parameter, we require person.js like this.
  var p = new Person("John", 22);

  p.writeToDatabase(function(err, rows) {
    if (err) console.log(err);
    else console.log("written to DB");
  });
});
If I'm not clear, you can ask for more details :). If the pattern I showed you doesn't fit your expectations, you can do the job in other ways ;)
I figured out the problem. My example works perfectly; I just had one totally unrelated problem.
At the end of my short program I had process.exit() so that my app would end when it finished its task. However, I hadn't taken into account the asynchronous nature of Node.js, so process.exit() executed before my write to the database completed, and therefore it failed.
Thanks for the help anyway!
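For future readers, the fix is to exit only from inside the callback, after the write has finished; a sketch based on the app.js above:

p.writeToDatabase(function (err, rows) {
  if (err) console.log(err);
  else console.log("written to DB");
  process.exit(err ? 1 : 0); // exit only once the query has actually completed
});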

Mongoose query blocks Node.js

Mongoose blocks Node.js while it is fetching data. I thought it was supposed to be absolutely non-blocking, and that when the callback fires, execution would simply resume there.
The problem is with:
Container.find({}, function (err, documents) {
  res.status(200).send(documents);
});
When I run this route in ExpressJS, it freezes Node.js for around 10 seconds, and no one else can get a connection in the meantime.
I have an open connection to MongoDB from the start, using Mongoose, and am not doing anything else with it later on. What's the problem? Is it supposed to work like that?
UPDATE:
So this is how I init Mongoose:
function initialDb() {
  seed();
  seedStructure();
  startApplication();
}

database.connect();
database.loadModels(initialDb);
and this is the place where I connect and init the models:
import mongoose from 'mongoose';
import chalk from 'chalk';
import config from '../config';

export default {
  loadModels(callback) {
    require('../../models/site');
    require('../../models/page');
    require('../../models/container');
    require('../../models/template');
    require('../../models/theme');

    if (typeof callback === 'function') {
      return callback();
    }
  },
  connect() {
    mongoose.connect(config.db.uri, function (err) {
      if (err) {
        console.error(chalk.red('Could not connect to MongoDB!'));
        console.log(err);
      }
    });
  },
  disconnect(callback) {
    mongoose.disconnect(function (err) {
      console.info(chalk.yellow('Disconnected from MongoDB.'));
      callback(err);
    });
  }
};
and the model
var mongoose = require('mongoose');
var Schema = mongoose.Schema;

var container = new Schema({
});

let model = mongoose.model('container', container);

module.exports = model;
It returns around 26k documents.
OK, so basically I found out that if I stream the results instead of getting them in one callback, it works much better (I can get to other actions in the meantime), like this:
var stream = Container.find({}).stream();
var array = [];

stream.on('data', (doc) => {
  array.push(doc);
}).on('error', (err) => {
}).on('close', () => {
  res.send(array);
});
This solves the problem. So this is how I would get big data out of MongoDB, but why does it slow down so much when I get it in one callback? Because of the 12 MB of data? A big JSON payload that needs to be parsed, or what?
The reason for the slowdown is quite mysterious to me.
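A plausible explanation (not confirmed in this thread): Mongoose hydrates all ~26k results into full Document instances, and the 12 MB response is then serialized in one go; both are synchronous, CPU-bound steps that block the event loop, whereas streaming spreads that work across many small ticks. On newer Mongoose versions the streaming API is .cursor(), and .lean() skips hydration entirely; a sketch:

var array = [];
Container.find({}).lean().cursor() // .lean() yields plain objects, skipping hydration
  .on('data', (doc) => array.push(doc))
  .on('error', (err) => res.status(500).end())
  .on('end', () => res.send(array));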
