Strongloop app does not load local datasource - javascript

I want to use different environment-specific datasource configurations in a Strongloop app. I saw at https://docs.strongloop.com/display/public/LB/Environment-specific+configuration that the priority of configurations is:
1. Environment-specific configuration, based on the value of NODE_ENV; for example, server/config.staging.json.
2. Local configuration file; for example, server/config.local.json.
3. Default configuration file; for example, server/config.json.
I have declared three datasource conf files:
datasources.json:
{}
datasources.local.json:
{
  "db": {
    "name": "db",
    "connector": "loopback-connector-mongodb",
    "host": "127.0.0.1",
    "port": "27017",
    "database": "woowDev"
  }
}
and datasources.staging.js:
module.exports = {
  db: {
    connector: 'mongodb',
    hostname: process.env.OPENSHIFT_MONGODB_DB_HOST,
    port: process.env.OPENSHIFT_MONGODB_DB_PORT,
    user: process.env.OPENSHIFT_MONGODB_DB_USERNAME,
    password: process.env.OPENSHIFT_MONGODB_DB_PASSWORD,
    database: 'woow'
  }
};
Now, unless I put the configuration of datasources.local.json in datasources.json, it does not work. I keep getting the error: AssertionError: User is referencing a dataSource that does not exist: "db"
I also tried adding the local conf to the staging conf and defining the variable NODE_ENV, but it would not load datasources.staging.js either. I defined NODE_ENV by doing:
export NODE_ENV=staging
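As a quick sanity check (not LoopBack-specific), it can help to confirm that the running process actually sees that value; a minimal sketch:

// e.g. at the top of server/server.js
console.log('NODE_ENV =', process.env.NODE_ENV); // should print "staging"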

I used node-debug to track down the issue, and it led to this particular Strongloop source file:
node_modules/loopback-boot/lib/config-loader.js
the function:
function mergeDataSourceConfig(target, config, fileName) {
  for (var ds in target) {
    var err = applyCustomConfig(target[ds], config[ds]);
    if (err) {
      throw new Error('Cannot apply ' + fileName + ' to `' + ds + '`: ' + err);
    }
  }
}
will not merge configs if the "db" key is not defined in the master file, i.e. datasources.json.
So, I just modified the datasources.json to:
{
  "db": {}
}
and it worked!
Maybe it is my fault, but the documentation is not clear enough.

The trick is to add all the datasources (memory/redis/mongo/postgres) in datasources.json and then override parameters in datasources.local.js, datasources.staging.js, or datasources.production.js.
Sample file configuration:
datasources.json
{
  "db": {
    "name": "db",
    "connector": "memory"
  },
  "redisDS": {
    "name": "redisDS",
    "connector": "redis"
  },
  "testPostgress": {
    "port": 5432,
    "name": "localPostgress",
    "user": "akumar",
    "connector": "postgresql"
  }
}
datasources.staging.js
module.exports = {
  db: {
    connector: 'memory'
  },
  redisDS: {
    connector: 'redis'
  },
  testPostgress: {
    database: 'stagingPostgress'
  }
};
Loopback will override the database name in this case; similarly, you can override other datasource parameters like port and user.
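To confirm what actually got merged at boot, one option (a sketch against the standard server/server.js template; the datasource name mirrors the example above) is to print the resolved settings from the boot callback:

boot(app, __dirname, function(err) {
  if (err) throw err;
  // settings reflects datasources.json merged with the
  // matching NODE_ENV-specific override file
  console.log(app.datasources.testPostgress.settings);
});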


Cypress 10 shows "The 'task' event has not been registered in the setupNodeEvents method. You must register it before using cy.task()"

In cypress.config.js, I'm trying to register tasks/plugin events and set my env configuration exactly as documented in their guide.
However, when trying to use "on" inside setupNodeEvents, I'm getting the error in the title about needing to register it.
Also of note: when passing both arguments (on, config), the config file does not pick up the env variables. Only when I put config first, or config by itself, do the env variables pass.
Also, my tasks are properly coded inside the test classes. I know this because they work just fine in previous versions (Cypress 9), but I can share them if someone thinks that's where the problem is.
/// <reference types="cypress" />
const { defineConfig } = require('cypress');
module.exports = defineConfig({
  e2e: {
    setupNodeEvents(on, config) {
      if (config.env == undefined) {
        return {
          baseUrl: "intentionally blank",
          env: {
            env: "test",
            schedulerBaseUrl: "intentionally blank",
            signInUrl: "intentionally blank",
            enableAccessibility: true,
            takeScreenShot: false,
            suites: "",
          },
        };
      }
      else if (config.env == 'development') {
        return {
          baseUrl: "https://blank.blank.com:blank",
          env: {
            environment: "development",
            schedulerBaseUrl: "intentionally blank",
            signInUrl: "intentionally blank",
            enableAccessibility: false,
            takeScreenShot: false
          },
        }
      }
      on('task', {
        log(message) {
          console.log(message)
          return null
        },
        table(message) {
          console.table(message)
          return null
        }
      })
      on('before:browser:launch', (browser, launchOptions) => {
        if (browser.name === 'chrome' && browser.isHeadless) {
          launchOptions.args.push('--disable-gpu');
          launchOptions.args.push('--disable-dev-shm-usage');
          launchOptions.args.push('use-fake-device-for-media-stream');
          return launchOptions;
        }
      });
    },
    chromeWebSecurity: false,
    screenshotOnRunFailure: false,
    trashAssetsBeforeRuns: true,
    numTestsKeptInMemory: 0,
    video: true,
    videoCompression: false,
    enableAccessibility: false,
    takeScreenShot: false,
    defaultCommandTimeout: 10000,
    execTimeout: 500000,
    pageLoadTimeout: 500000,
    retries: {
      runMode: 1,
      openMode: 0
    },
    blockHosts: [
      "*intentionally blank"
    ],
    redirectionLimit: 199,
    projectId: "intentionally blank",
  }
})
require('@applitools/eyes-cypress')(module);
I have had a similar issue, but for me it was the migration from Cypress 9.x.x to Cypress 10+ while having some tasks and environment variable overrides declared in the previous location, which was the module.exports = (on, config) => {...} code block within the ~/cypress/plugins/index.js file.
I tried migrating, yet I was running into some issues, yours included.
I came up with kind of a hacky way to keep it all in index.js: within the cypress.config.js file, I simply require the whole exported index.js module like this:
setupNodeEvents(on, config) {
  return require('./cypress/plugins/index.js')(on, config)
},
Hope this helps.
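For comparison, a sketch of a setupNodeEvents that avoids the early return entirely: register the event handlers first, then mutate and return the config, so on('task', ...) is always registered regardless of environment (the URL and env names here are placeholders, not the asker's real values):

const { defineConfig } = require('cypress');

module.exports = defineConfig({
  e2e: {
    setupNodeEvents(on, config) {
      // register tasks unconditionally, before any return
      on('task', {
        log(message) {
          console.log(message);
          return null;
        },
      });
      // then layer environment-specific values onto config and return it
      if (config.env.environment === 'development') {
        config.baseUrl = 'https://dev.example.com'; // placeholder
      }
      return config;
    },
  },
});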

"Runtime.ImportModuleError" trying to access npm package in an AWS lambda function using layers

I'd like to use the npm package "request" in an AWS lambda function.
I'm trying to follow the procedure outlined in this article here: https://medium.com/@anjanava.biswas/nodejs-runtime-environment-with-aws-lambda-layers-f3914613e20e
I've created a directory structure like this:
nodejs
│ package-lock.json
│ package.json
└───node_modules
My package.json looks like this:
{
  "name": "my-package-name",
  "version": "1.0.0",
  "description": "whatever",
  "author": "My Name",
  "license": "MIT",
  "dependencies": {
    "request": "^2.88.0"
  }
}
As far as I can tell from the article, all I should have to do with the above is run npm i, zip up the directory, upload it as a layer, and add the layer to my lambda function.
I've done all of that, but all that I get when I try to test my function is this:
{
  "errorType": "Runtime.ImportModuleError",
  "errorMessage": "Error: Cannot find module 'request'\nRequire stack:\n- /var/task/index.js\n- /var/runtime/UserFunction.js\n- /var/runtime/index.js",
  "trace": [
    "Runtime.ImportModuleError: Error: Cannot find module 'request'",
    "Require stack:",
    ...
...as if the layer had never been added. The error is exactly the same whether the layer is added or not. If there's some sort of permissions issue that needs to be resolved, there's nothing in the article that indicates that.
I've tried a few different things, like whether or not my .zip file contains the top-level directory "nodejs" or just its contents. I've tried adding "main": "index.js", to my package.json, with an index.js file like this:
export.modules.request = require('request');
...all to no avail.
What am I missing?
Oh, I can't believe it's just this!
The top-level directory for the .zip file must LITERALLY be named "nodejs"! I was using a different name, and only changed it back to "nodejs" in the text of this post to be more generic, but the directory name was the real problem all along.
Sigh.
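For what it's worth, once the layer is named and mounted correctly, the function code needs nothing special; a minimal sketch of a handler that pulls request from the layer (the URL is a placeholder):

// index.js of the lambda function itself; 'request' resolves from the
// layer's nodejs/node_modules directory, which the runtime puts on the
// module search path under /opt
const request = require('request');

exports.handler = (event, context, callback) => {
  request('https://example.com', (err, res, body) => {
    if (err) return callback(err);
    callback(null, { statusCode: res.statusCode });
  });
};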
Usually, it's got to do with the names of the folders/files inside. And if those files are referred to elsewhere, it's going to percolate and complain there as well. Just check the folder structure thoroughly and you will be able to catch the thief. I struggled for a day to figure it out; it was a silly typo.
For me, what was causing these issues was having a version of the package.json still inside an older version of the .build folder which had also been deployed. Once I removed that, packages were installed as expected.
I got this error also. The src.zip file should have the source code directly without any parent folder.
For example, if you want to zip src folder, you need to do this.
cd src/ && zip -r ../src.zip .
OK, so I found my issue. I was zipping a folder containing my lambda instead of just my lambda's root. This was causing the lambda to look for my handler at ./index, but not finding it, as it was located at ./nodejs/index.js.
Here is the command I used to properly zip my files from the root:
cd nodejs/
ls # should look like this: node_modules index.js package-lock.json package.json
zip -r ../nodejs.zip ./*
This zips everything properly so that the lambda finds your files at its root, like in the default configuration when creating a lambda through the AWS UI.

Accessing table data from RDS using lambda function with encrypted key (KMS) and Environment variable

Step 1: Enable a key in KMS (Key Management Service).
Review your key policy, and you are done with the KMS key creation:
{
  "Id": "key-consolepolicy-3",
  "Version": "2012-10-17",
  "Statement": [
    {
      "Sid": "Enable IAM User Permissions",
      "Effect": "Allow",
      "Principal": {
        "AWS": "arn:aws:iam::163806924483:root"
      },
      "Action": "kms:*",
      "Resource": "*"
    },
    {
      "Sid": "Allow access for Key Administrators",
      "Effect": "Allow",
      "Principal": {
        "AWS": "arn:aws:iam::163806924483:user/User1@gmail.com"
      },
      "Action": [
        "kms:Create*",
        "kms:Describe*",
        "kms:Enable*",
        "kms:List*",
        "kms:Put*",
        "kms:Update*",
        "kms:Revoke*",
        "kms:Disable*",
        "kms:Get*",
        "kms:Delete*",
        "kms:TagResource",
        "kms:UntagResource",
        "kms:ScheduleKeyDeletion",
        "kms:CancelKeyDeletion"
      ],
      "Resource": "*"
    },
    {
      "Sid": "Allow use of the key",
      "Effect": "Allow",
      "Principal": {
        "AWS": [
          "arn:aws:iam::163806924483:user/User1@gmail.com",
          "arn:aws:iam::163806924483:user/User2@gmail.com",
          "arn:aws:iam::163806924483:user/User3@gmail.com"
        ]
      },
      "Action": [
        "kms:Encrypt",
        "kms:Decrypt",
        "kms:ReEncrypt*",
        "kms:GenerateDataKey*",
        "kms:DescribeKey"
      ],
      "Resource": "*"
    },
    {
      "Sid": "Allow attachment of persistent resources",
      "Effect": "Allow",
      "Principal": {
        "AWS": [
          "arn:aws:iam::163806924483:user/User1.dilip@gmail.com",
          "arn:aws:iam::163806924483:user/User2@gmail.com",
          "arn:aws:iam::163806924483:user/User3@gmail.com"
        ]
      },
      "Action": [
        "kms:CreateGrant",
        "kms:ListGrants",
        "kms:RevokeGrant"
      ],
      "Resource": "*",
      "Condition": {
        "Bool": {
          "kms:GrantIsForAWSResource": "true"
        }
      }
    }
  ]
}
Step 2: Create a policy in IAM for KMS and assign it to each of your Lambda functions:
"StringEquals": {
"kms:EncryptionContext:LambdaFunctionName": [
"LambdaFunction-1",
"LambdaFunction-2",
"LambdaFunction-3"
]
}
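That StringEquals condition is only a fragment; in a full IAM policy it would typically sit inside a statement like the following (a sketch; the region, account ID, and key ID are placeholders):

{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": "kms:Decrypt",
      "Resource": "arn:aws:kms:us-east-1:111122223333:key/your-key-id",
      "Condition": {
        "StringEquals": {
          "kms:EncryptionContext:LambdaFunctionName": [
            "LambdaFunction-1",
            "LambdaFunction-2",
            "LambdaFunction-3"
          ]
        }
      }
    }
  ]
}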
Step 3: Assign the policy created in Step 2 to your default Lambda role (the first Lambda needs to be created to get the default Lambda role).
Step 4: Create the Lambda function.
Node.js code for the Lambda function:
const mysql = require('mysql');
const aws = require("aws-sdk");

const functionName = process.env.AWS_LAMBDA_FUNCTION_NAME;
let res;
let response = {};

exports.handler = async (event) => {
  reset_globals();
  // decrypt the encrypted environment variables
  const rds_user = await kms_decrypt(process.env.RDS_USERNAME);
  const rds_pwd = await kms_decrypt(process.env.RDS_PASSWORD);
  // set up the RDS connection
  var db_connection = mysql.createConnection({
    host: process.env.RDS_HOSTNAME,
    user: rds_user,
    password: rds_pwd,
    port: process.env.RDS_PORT,
    database: process.env.RDS_DATABASE
  });
  var sqlQuery = `SELECT doc_id from documents`;
  await getValues(db_connection, sqlQuery);
  // return whatever getValues populated
  return response;
};

async function getValues(db_connection, sql) {
  await new Promise((resolve, reject) => {
    db_connection.query(sql, function (err, result) {
      if (err) {
        response = { statusCode: 500, body: { message: "Database Connection Failed", error: err } };
        console.log(response);
        resolve();
      }
      else {
        console.log("Number of records retrieved: " + JSON.stringify(result));
        res = result;
        resolve();
      }
    });
  });
}

async function kms_decrypt(encrypted) {
  const kms = new aws.KMS();
  const req = {
    CiphertextBlob: Buffer.from(encrypted, 'base64'),
    EncryptionContext: { LambdaFunctionName: functionName }
  };
  const decrypted = await kms.decrypt(req).promise();
  let cred = decrypted.Plaintext.toString('ascii');
  return cred;
}

function reset_globals() {
  res = undefined;
  response = {};
}
You should now see the KMS key available to the Lambda.
Step 5: Set the environment variables and encrypt them.
Lambda -> Functions -> Configuration -> Environment variables -> Edit
RDS_DATABASE docrds
RDS_HOSTNAME docrds-library.c1k3kcldebmp.us-east-1.rds.amazonaws.com
RDS_PASSWORD root123
RDS_PORT 3306
RDS_USERNAME admin
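If you need to produce such a ciphertext yourself rather than through the console's encryption helpers, here is a sketch with the Node.js aws-sdk v2 (the key ID is a placeholder; the EncryptionContext must match what the decrypt side sends):

const aws = require('aws-sdk');
const kms = new aws.KMS();

async function encryptForLambda(plaintext, keyId, lambdaName) {
  const res = await kms.encrypt({
    KeyId: keyId, // placeholder: your KMS key ID or ARN
    Plaintext: plaintext,
    EncryptionContext: { LambdaFunctionName: lambdaName }
  }).promise();
  // store this base64 string as the environment variable value
  return res.CiphertextBlob.toString('base64');
}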
In the Lambda function, use the code below to decrypt the encrypted environment variables (same helper as above):
async function kms_decrypt(encrypted) {
  const kms = new aws.KMS();
  const req = {
    CiphertextBlob: Buffer.from(encrypted, 'base64'),
    EncryptionContext: { LambdaFunctionName: functionName }
  };
  const decrypted = await kms.decrypt(req).promise();
  let cred = decrypted.Plaintext.toString('ascii');
  return cred;
}
My RDS documents table looks like this (screenshot omitted).
I am accessing the column doc_id using sqlQuery in the Lambda function:
var sqlQuery = `SELECT doc_id from documents`;
After testing the Lambda function, I get the output below (screenshot omitted).
If you get a module import error for mysql, then you must add a layer:
"errorType": "Runtime.ImportModuleError",
"errorMessage": "Error: Cannot find module 'mysql'\nRequire stack:\n- /var/task/index.js\n- /var/runtime/UserFunction.js\n- /var/runtime/index.js",
"trace": [
  "Runtime.ImportModuleError: Error: Cannot find module 'mysql'",
You can configure your Lambda function to use additional code and
content in the form of layers. A layer is a ZIP archive that contains
libraries, a custom runtime, or other dependencies. With layers, you
can use libraries in your function without needing to include them in
your deployment package.
To include libraries in a layer, place them in the directory structure
that corresponds to your programming language.
Node.js – nodejs/node_modules
Python – python
Ruby – ruby/gems/2.5.0
Java – java/lib
First create a zip archive that contains the mysql module:
First create a project.
Then, in the terminal: $project-path > npm init
Then: $project-path > npm install mysql
You should see a node_modules folder created.
Zip that node_modules folder and upload it as a layer (screenshot omitted).
Then go to Lambda -> Layers -> Create layer.

loopback send email with attachment not working

I have the Career PersistedModel for storing the data in the database, and I have the attachment model with file storage to store files in some location. Now I want to send an email with the data. I am able to send only the career data, but I want to send the attachment with the same email as well. I am not able to fetch the file name, because it is not in the career model; it is in the attachment model. How do I get the file name and send it? Help me out.
career.js
const app = require('../../server/server');

module.exports = function(Career) {
  Career.afterRemote('create', function(context, remoteMethodOutput, next) {
    next();
    console.log(remoteMethodOutput)
    Career.app.models.Email.send({
      to: 'lakshmipriya.l@gmail.com',
      from: 'lakshmipriya.l@gmail.com',
      subject: 'my subject',
      html: 'Hello-world',
      attachments: [
        {
          path: '../files/resume/' + remoteMethodOutput.resume,
        }
      ],
    }, function(err, mail) {
      // console.log(context.result.email)
      console.log('email sent!');
      cb(err);
    });
  });
};
attachment.json
{
  "name": "attachment",
  "base": "Model",
  "idInjection": true,
  "options": {
    "validateUpsert": true
  },
  "properties": {},
  "validations": [],
  "relations": {},
  "acls": [],
  "methods": {}
}
My project structure, where I store the files, looks like this (screenshot omitted).
Using an absolute path for your files is always more robust than a relative path. Use __dirname for that:
const filePath = __dirname + '/files/resume/' + remoteMethodOutput.resume;
If you need to go up one level and then enter the files directory, you need Node's path module to resolve it:
const path = require("path"),
      filePath = path.normalize(__dirname + '/../files/resume/' + remoteMethodOutput.resume);
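Putting that together with the hook from the question, here is a sketch of the remote hook using an absolute path and deferring next() until the mail has been handed off (the addresses are placeholders, and the files/resume location is assumed from the question):

const path = require('path');

module.exports = function(Career) {
  Career.afterRemote('create', function(context, remoteMethodOutput, next) {
    // resolve the attachment relative to this file, not the working directory
    const filePath = path.join(__dirname, '..', 'files', 'resume',
      remoteMethodOutput.resume);
    Career.app.models.Email.send({
      to: 'someone@example.com',    // placeholder
      from: 'someone@example.com',  // placeholder
      subject: 'my subject',
      html: 'Hello-world',
      attachments: [{ path: filePath }],
    }, function(err) {
      console.log(err ? err : 'email sent!');
      next(err);
    });
  });
};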

ssh2 node js sftp protocol Error Handshake failed

Hello, I have a little problem. I developed an SFTP client script with Node.js that connects to an SFTP server and grabs some files. I tested it with my local server and it's working, but when I tried to use it with the production server I received this error:
Error: Handshake failed: no matching key exchange algorithm
I already generated the RSA key using ssh-keygen.
Here is the relevant part of the script:
var Client = require('ssh2').Client;
var fs = require('fs');
var path = require('path');
var args = process.argv.slice(2);

var connSettings = {
  host: args[0] || '127.0.0.1',
  port: args[1] || 22,
  username: args[2] || 'karim',
  password: args[3] || 'karimos',
  algorithms: {
    hmac: ['hmac-sha2-256', 'hmac-sha2-512', 'hmac-sha1', 'hmac-sha1-96']
  }
};
I also had the same problem and solved it by adding the following:
algorithms: {
  kex: [
    "diffie-hellman-group1-sha1",
    "ecdh-sha2-nistp256",
    "ecdh-sha2-nistp384",
    "ecdh-sha2-nistp521",
    "diffie-hellman-group-exchange-sha256",
    "diffie-hellman-group14-sha1"
  ],
  cipher: [
    "3des-cbc",
    "aes128-ctr",
    "aes192-ctr",
    "aes256-ctr",
    "aes128-gcm",
    "aes128-gcm@openssh.com",
    "aes256-gcm",
    "aes256-gcm@openssh.com"
  ],
  serverHostKey: [
    "ssh-rsa",
    "ecdsa-sha2-nistp256",
    "ecdsa-sha2-nistp384",
    "ecdsa-sha2-nistp521"
  ],
  hmac: [
    "hmac-sha2-256",
    "hmac-sha2-512",
    "hmac-sha1"
  ]
}
For myself, I added debug: console.log to my config object. This output more detail about the connection attempt:
{
  "port": 22,
  "host": "test.test.com",
  "user": "test",
  "password": "******",
  "debug": console.log
}
Handshake: (remote) KEX method: diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1
Handshake: No matching key exchange algorithm
Based on this error, I updated my config's algorithms:
{
  "port": 22,
  "host": "test.test.com",
  "user": "test",
  "password": "******",
  "algorithms": {
    "kex": [
      "diffie-hellman-group14-sha1",
      "diffie-hellman-group-exchange-sha1"
    ]
  }
}
After adding these algorithms, the connection was successful on my machine.
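For reference, a minimal end-to-end sketch combining the debug hook and an explicit kex list with ssh2's Client (host and credentials are placeholders):

var Client = require('ssh2').Client;

var conn = new Client();
conn.on('ready', function() {
  console.log('Connection established');
  conn.end();
}).on('error', function(err) {
  console.error('Connection failed:', err);
}).connect({
  host: 'test.test.com', // placeholder
  port: 22,
  username: 'test',
  password: '******',
  debug: console.log, // prints the handshake negotiation
  algorithms: {
    kex: ['diffie-hellman-group14-sha1', 'diffie-hellman-group-exchange-sha1']
  }
});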
You may edit the /etc/ssh/sshd_config file on your server in order to allow the key exchange method in question :)
My first suggestion would be to upgrade the ssh server on the server you're connecting to so that a more secure configuration can be had. This is the best/most secure solution.
If you cannot make changes on this server and you absolutely need to connect, then you can explicitly set the kex to a list of key exchange methods you want to support (valid algorithm names can be found in the ssh2-streams documentation). For example:
algorithms: {
  kex: [ ... ]
}
Have you tried changing your algorithms declaration to...?
algorithms: {
  serverHostKey: ['hmac-sha2-256', 'hmac-sha2-512', 'hmac-sha1', 'hmac-sha1-96'],
}

log4js-node in nodejs not logging to file

Not a node expert, and this is the first time I'm using log4js-node.
I am trying to get my ERROR logs and any of my console logs to write to a log_file.log file with log4js on a Node.js server running Express. Here is my config file:
{
  "replaceConsole": true,
  "appenders": [
    {
      "type": "file",
      "filename": "log_file.log",
      "maxLogSize": 20480,
      "backups": 3,
      "category": "relative-logger"
    },
    {
      "type": "logLevelFilter",
      "level": "ERROR",
      "appender": {
        "type": "file",
        "filename": "log_file.log"
      }
    },
    {
      "appender": {
        "type": "smtp",
        "recipients": "myemail@gmail.com",
        "sender": "myemailadd@gmail.com",
        "sendInterval": 60,
        "transport": "SMTP",
        "SMTP": {
          "host": "localhost",
          "port": 25
        }
      }
    }
  ]
}
And here is how I'm requiring and configuring log4js in my app.js file:
var log4js = require("log4js");
log4js.configure("log_config.json");
var logger = log4js.getLogger();
I'm sending manual errors to log4js with this (I can get this to log to the console fine; I just can't get the log file written):
logger.error('A mandrill error occurred: ' + e.name + ' - ' + e.message);
And I'm hoping log4js catches the application's normal ERROR messages.
How do I get log4js to log to log_file.log, then send me an email of that log? I have installed nodemailer 0.7, FYI, to handle SMTP.
Maybe you could remove "category":"relative-logger" in your file appender.
Yes, remove "category":"relative-logger"; it somehow blocks the data transfer into your log file. Or try something like this:
// Setup Logging
log4js.configure({
  appenders: [
    { type: 'console' },
    { type: 'file', filename: '.\\logs\\PesaFastaArchiveData.log' }
  ]
});
The path is, of course, a Windows path.
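For a quick sanity check that file logging works at all, here is a stripped-down sketch using the same old-style array configuration as above (log4js 0.6.x era; newer log4js versions use a different configuration shape):

var log4js = require('log4js');

log4js.configure({
  replaceConsole: true,
  appenders: [
    { type: 'console' },
    { type: 'file', filename: 'log_file.log' }
  ]
});

var logger = log4js.getLogger();
logger.error('this line should end up in log_file.log');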
