Key element does not match the schema with DocumentClient - javascript

I have a Lambda that I have created using the AWS Toolkit for Visual Studio Code. I have a small lambda.js file that exports the handler for API Gateway events.
const App = require('./app');
const AWS = require('aws-sdk');

exports.handler = async (event, context) => {
  let config = {
    aws_region: 'us-east-1',
    //endpoint: 'http://localhost:8000',
  };
  let dynamo = new AWS.DynamoDB.DocumentClient();
  let app = new App(dynamo, config, event);
  try {
    let response = await app.run();
    return response;
  } catch (err) {
    return err;
  }
};
The app.js file represents the logic of my Lambda. This is broken up to improve testing.
const AWS = require('aws-sdk');

class App {
  constructor(datastore, configuration, event) {
    this.datastore = datastore;
    this.httpEvent = event;
    this.configuration = configuration;
  }

  async run() {
    AWS.config.update({
      region: this.configuration.aws_region,
      endpoint: this.configuration.endpoint,
    });
    let params = {
      TableName: 'test',
      Key: {
        'year': 2015,
        'title': 'The Big New Movie',
      },
    };
    const getRequest = this.datastore.get(params);
    // EXCEPTION THROWN HERE
    var result = await getRequest.promise();
    let body = {
      location: this.configuration.aws_region,
      url: this.configuration.endpoint,
      data: result,
    };
    return {
      'statusCode': 200,
      'body': JSON.stringify(body),
    };
  }
}

module.exports = App;
I can write the following mocha test and get it to pass.
'use strict';

const AWS = require('aws-sdk');
const chai = require('chai');
const sinon = require('sinon');
const App = require('../app');
const expect = chai.expect;

var event, context;

const dependencies = {
  // sinon.stub() prevents calls to DynamoDB and allows for faking of methods.
  dynamo: sinon.stub(new AWS.DynamoDB.DocumentClient()),
  configuration: {
    aws_region: 'us-west-2',
    endpoint: 'http://localhost:5000',
  },
};

describe('Tests handler', function () {
  // Reset test doubles for isolating individual test cases
  this.afterEach(sinon.reset);

  it('verifies successful response', async () => {
    dependencies.dynamo.get.returns({ promise: sinon.fake.resolves('foo bar') });
    let app = new App(dependencies.dynamo, dependencies.configuration, event);
    const result = await app.run();
    expect(result).to.be.an('object');
    expect(result.statusCode).to.equal(200);
    expect(result.body).to.be.an('string');
    console.log(result.body);
    let response = JSON.parse(result.body);
    expect(response).to.be.an('object');
    expect(response.location).to.be.equal(dependencies.configuration.aws_region);
    expect(response.url).to.be.equal(dependencies.configuration.endpoint);
    expect(response.data).to.be.equal('foo bar');
  });
});
However, when I run the Lambda locally using the Debug Locally Code Lens option in VS Code, the AWS.DynamoDB.DocumentClient.get call throws an exception.
{
  "message": "The provided key element does not match the schema",
  "code": "ValidationException",
  ...
  "statusCode": 400,
  "retryable": false,
  "retryDelay": 8.354173589804192
}
I have a table created in the us-east-1 region, which is where the non-test code is configured to point. I've confirmed that the HTTP endpoint the DocumentClient hits is dynamodb.us-east-1.amazonaws.com. The table name is correct, and I have a hash key called year and a sort key called title. Why would this not find the key correctly? I pulled this example from the AWS documentation, created a table to mirror its key schema, and have not had any luck.

The issue is that the Key year was given a number value while the table expects it to be a string.
Wrapping the 2015 in quotes lets the DocumentClient know that this is a string, so it can match the schema in DynamoDB.
let params = {
  TableName: 'test',
  Key: {
    'year': '2015',
    'title': 'The Big New Movie',
  },
};
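If you are not sure which type the table actually expects, one way to check is to describe the table and inspect its attribute definitions. A minimal sketch using the same aws-sdk v2 client as above (the table name test and the region are taken from the question):

const AWS = require('aws-sdk');

const dynamodb = new AWS.DynamoDB({ region: 'us-east-1' });

// AttributeType is 'S' for string, 'N' for number, 'B' for binary.
// The Key passed to DocumentClient.get() must use matching JS types.
dynamodb.describeTable({ TableName: 'test' })
  .promise()
  .then(data => console.log(data.Table.AttributeDefinitions))
  .catch(console.error);

If the output shows { AttributeName: 'year', AttributeType: 'S' }, the key must be the string '2015'; if it shows 'N', the original number 2015 was right and the mismatch is elsewhere.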

Related

Unit Testing with Jest for Strapi v4

I am trying to perform unit tests with Jest for the new version of Strapi, v4, which was released a couple of weeks ago. The old unit-testing guide in their documentation no longer runs as expected. I have, however, modified the code to work to a certain extent. Currently I have the following:
./test/helpers/strapi.js:
const Strapi = require("@strapi/strapi");

let instance;

async function setupStrapi() {
  if (!instance) {
    /** the following code is copied from `./node_modules/strapi/lib/Strapi.js` */
    await Strapi().load();
    instance = strapi; // strapi is global now
    await instance.server
      .use(instance.server.router.routes()) // populate KOA routes
      .use(instance.server.router.allowedMethods()); // populate KOA methods
    await instance.server.mount();
  }
  return instance;
}

module.exports = { setupStrapi };
./tests/app.test.js:
const fs = require("fs");
const request = require("supertest"); // `request` is used below but was missing from the original snippet
const { setupStrapi } = require("./helpers/strapi");

beforeAll(async () => {
  await setupStrapi();
});

afterAll(async () => {
  const dbSettings = strapi.config.get("database.connection.connection");

  // close server to release the db-file
  await strapi.server.destroy();

  // DATABASE_FILENAME=.tmp/test.db
  // delete test database after all tests
  if (dbSettings && dbSettings.filename) {
    const tmpDbFile = `${dbSettings.filename}`;
    if (fs.existsSync(tmpDbFile)) {
      fs.unlinkSync(tmpDbFile);
    }
  }
});

it("should return hello world", async () => {
  await request(strapi.server.httpServer).get("/api/hello").expect(200); // Expect response http code 200
});
./config/env/test/database.js
const path = require("path");

module.exports = ({ env }) => ({
  connection: {
    client: "sqlite",
    connection: {
      filename: path.join(
        __dirname,
        "../../../",
        env("DATABASE_FILENAME", ".tmp/test.db")
      ),
    },
    useNullAsDefault: true,
  },
});
The route /api/hello is a custom API endpoint. This works perfectly when running strapi develop, and all permissions are set correctly.
The tests run, but every endpoint other than / or /admin returns 403 Forbidden, meaning there is a problem with the permissions. It would seem that the database file .tmp/data.db (used in development) is not replicated correctly in .tmp/test.db. In other words, this is close to working, but the permissions for the API endpoints are not set correctly.
I have been searching through StackOverflow and the Strapi forums over the past few days, but to no avail. I would greatly appreciate some pointers as to how to fix this :)
It seems you need to grant the right privileges to your routes on your test DB.
For that you can create a function, let's call it grantPrivilege, and call it in your test's beforeAll.
// Here I want to grant the route update in my organization collection
beforeAll(async () => {
  await grantPrivilege(1, 'permissions.application.controllers.organization.update');
});
And here is the grantPrivilege function:
// roleID is 1 for authenticated and 2 for public
const grantPrivilege = async (roleID = 1, value, enabled = true, policy = '') => {
  const updateObj = value
    .split('.')
    .reduceRight((obj, next) => ({ [next]: obj }), { enabled, policy });

  const roleName = roleID === 1 ? 'Authenticated' : 'Public';

  const roleIdInDB = await strapi
    .query('role', 'users-permissions')
    .findOne({ name: roleName });

  return strapi.plugins['users-permissions'].services.userspermissions.updateRole(
    roleIdInDB,
    updateObj,
  );
};
Let me know if that helps
So in order for that to work in v4, this is how I did it.
This was based on this and this, but Stf's posting in this, saying "inject them in your database during the bootstrap phase like it is made in the templates", was what really set me on the right track.
So if you look here you will see this function:
async function setPublicPermissions(newPermissions) {
  // Find the ID of the public role
  const publicRole = await strapi
    .query("plugin::users-permissions.role")
    .findOne({
      where: {
        type: "public",
      },
    });

  // Create the new permissions and link them to the public role
  const allPermissionsToCreate = [];
  Object.keys(newPermissions).map((controller) => {
    const actions = newPermissions[controller];
    const permissionsToCreate = actions.map((action) => {
      return strapi.query("plugin::users-permissions.permission").create({
        data: {
          action: `api::${controller}.${controller}.${action}`,
          role: publicRole.id,
        },
      });
    });
    allPermissionsToCreate.push(...permissionsToCreate);
  });
  await Promise.all(allPermissionsToCreate);
}
Later in the code, this function is called like this:
await setPublicPermissions({
  article: ["find", "findOne"],
  category: ["find", "findOne"],
  author: ["find", "findOne"],
  global: ["find", "findOne"],
  about: ["find", "findOne"],
});
So in my case I modified this function a bit to accept either the authenticated (1) or the public (2) role, inspired by Sidney C's answer above.
This is how I did it:
const grantPrivilege = async (roleID = 1, newPermissions) => {
  const roleName = roleID === 1 ? "authenticated" : "public";

  // Find the ID of the chosen role
  const roleEntry = await strapi
    .query("plugin::users-permissions.role")
    .findOne({
      where: {
        type: roleName,
      },
    });

  // Create the new permissions and link them to that role
  const allPermissionsToCreate = [];
  Object.keys(newPermissions).map((controller) => {
    const actions = newPermissions[controller];
    const permissionsToCreate = actions.map((action) => {
      return strapi.query("plugin::users-permissions.permission").create({
        data: {
          action: `api::${controller}.${controller}.${action}`,
          role: roleEntry.id,
        },
      });
    });
    allPermissionsToCreate.push(...permissionsToCreate);
  });
  await Promise.all(allPermissionsToCreate);
};
And then in my beforeAll block I call it like this:
await grantPrivilege(1, {
  "my-custom-collection": ["create", "update"],
  category: ["find", "findOne"],
  author: ["find", "findOne"],
});
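If you grant privileges to the authenticated role (roleID 1), the test request also needs a JWT. A sketch of how that could look, assuming the users-permissions JWT service is still reachable through the v3-style accessor (as in the Strapi testing guide) and that a test user has already been created; the user variable here is hypothetical:

// Issue a token for an existing test user (user creation not shown here).
const jwt = strapi.plugins["users-permissions"].services.jwt.issue({ id: user.id });

await request(strapi.server.httpServer)
  .get("/api/hello")
  .set("Authorization", `Bearer ${jwt}`)
  .expect(200);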

Node.Js google dataflow sinon stub is not working

Here is my function, which calls the Google Dataflow API.
index.js
const { google } = require('googleapis');

const triggerDataflowJob = async (event, context) => {
  const auth = new google.auth.GoogleAuth({
    scopes: ['https://www.googleapis.com/auth/cloud-platform'],
  });
  const authClient = await auth.getClient();
  const projectId = await auth.getProjectId();
  const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
  const dataflowReqBody = dataflowRequest(projectId, event.bucket, event.name, context);
  return dataflow.projects.locations.templates.create(dataflowReqBody);
};

module.exports = { triggerDataflowJob };
My unit test for the above function:
index.test.js
const sinon = require('sinon'); // `sinon` is used below but was missing from the original snippet
const { google } = require('googleapis');
const { triggerDataflowJob } = require('./index.js');

describe('Function: triggerDataflowJob', () => {
  it('should return success', async () => {
    const projectsStub = sinon.stub().returnsThis();
    const locationsStub = sinon.stub().returnsThis();
    const dataflowStub = sinon.stub(google, 'dataflow').callsFake(() => ({
      projects: projectsStub,
      locations: locationsStub,
      templates: sinon.stub(),
    }));

    const context = { eventId: '126348454' };
    const event = { bucket: 'test-bucket', name: 'test-file.json' };

    await triggerDataflowJob(event, context);
    sinon.assert.calledOnce(dataflowStub);
  });
});
But I am getting below error when I run test.
1) Trigger Dataflow Job:
Function: triggerDataflowJob
should return success:
TypeError: Cannot read property 'templates' of undefined
at triggerDataflowJob (index.js:12:38)
at process._tickCallback (internal/process/next_tick.js:68:7)
Can someone please help me find the issue? What am I missing or doing wrong?
From the error, it looks like the dataflow object you are getting back does not have a templates key within the locations object.
Looking at your test, the mocked dataflow object looks something like this:
const dataflow = {
  projects: {...},
  locations: {...},
  templates: {...}
}
In the return statement of the main function you are looking for templates within locations.
return dataflow.projects.locations.templates.create(dataflowReqBody);
If locations is supposed to have a templates key, then you need to update your test and the way you're mocking the object (a sketch of such a nested stub follows after the next snippet). If it is not supposed to have a templates key, then you can update your return statement like so:
return dataflow.projects.templates.create(dataflowReqBody);
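If it is supposed to be nested, here is a sketch of a stub that mirrors the path the code actually walks; the resolved value is an arbitrary placeholder:

const sinon = require('sinon');
const { google } = require('googleapis');

// Build the full nested shape: dataflow.projects.locations.templates.create(...)
const createStub = sinon.stub().resolves({ status: 200 }); // placeholder response
const dataflowStub = sinon.stub(google, 'dataflow').returns({
  projects: { locations: { templates: { create: createStub } } },
});

// After calling triggerDataflowJob(event, context) you can assert on both stubs:
// sinon.assert.calledOnce(dataflowStub);
// sinon.assert.calledOnce(createStub);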
Hope that helps!

How to mock pg Pool with Sinon

In a previous project I mocked the mysql library with Sinon. I did this like so:
X.js:
const con = mysql.createPool(config.mysql);
...
Some other place in the project:
const rows = await con.query(query, inserts);
...
X.test.js:
const sinon = require('sinon');
const mockMysql = sinon.mock(require('mysql'));
...
mockMysql.expects('createPool').returns({
  query: () => {
    // Handles the query...
  },
  ...
It worked perfectly.
In another project I am trying to mock pg, again with Sinon.
pool.js:
const { Pool } = require('pg');
const config = require('@blabla/config');

const pool = new Pool(config.get('database'));

module.exports = pool;
Some other place in the project:
const con = await pool.connect();
const result = await con.query(...
Y.test.js:
???
I can't understand how to mock connect().query(). None of the following approaches work:
1:
const { Pool } = require('pg');
const config = require('@blabla/config');
const mockPool = sinon.mock(new Pool(config.get('database')));
...
mockPool.expects('connect').returns({
  query: () => {
    console.log('query here');
  },
});
1 results in no error but the real db connection is used.
2:
const { Pool } = sinon.mock(require('pg'));
const config = require('@blabla/config');
const pool = new Pool(config.get('database'));
pool.expects('connect').returns({
  query: () => {
    console.log('query here');
  },
});
2 => TypeError: Pool is not a constructor
3:
const { Pool } = sinon.mock(require('pg'));
const config = require('@blabla/config');
const pool = sinon.createStubInstance(Pool);
pool.connect.returns({
  query: () => {
    console.log('query here');
  },
});
3 => TypeError: The constructor should be a function.
Can anybody point me in the right direction with how to mock my PostgreSQL connection?
Example: I have postgres.js like this.
const { Pool } = require('pg');

const handler = {
  count: async (pgQuery) => {
    try {
      const pool = new Pool();
      const res = await pool.query(pgQuery);
      return { count: parseInt(res.rows[0].counter, 10) };
    } catch (error) {
      // Log/Throw error here.
    }
    return false;
  }
}

module.exports = handler;
The spec test I created in postgres.spec.js is like this.
const { expect } = require('chai');
const sinon = require('sinon');
const pgPool = require('pg-pool');
const handler = require('./postgres.js');

describe('Postgres', function () {
  it('should have method count that bla bla', async function () {
    // Create stub pgPool query.
    const postgreeStubQuery = sinon.stub(pgPool.prototype, 'query');
    postgreeStubQuery.onFirstCall().throws('XXX');
    postgreeStubQuery.onSecondCall().resolves({
      rows: [{ counter: 11 }],
    });

    // Catch case.
    const catcher = await handler.count('SELECT COUNT()..');
    expect(catcher).to.equal(false);
    expect(postgreeStubQuery.calledOnce).to.equal(true);

    // Correct case.
    const correct = await handler.count('SELECT COUNT()..');
    expect(correct).to.deep.equal({ count: 11 });
    expect(postgreeStubQuery.calledTwice).to.equal(true);

    // Restore stub.
    postgreeStubQuery.restore();
  });
});
To stub pool.query(), you need to stub the query method on the pg-pool prototype.
Hope this helps.
Since you need to mock the returned results of a query, I think the easiest solution would be to abstract your database away from the code needing the query results. Say your query results return information about a person: create a person.js module with specific methods for interacting with the database.
Your other code needing the person information from the database won't know or care what type of database you use or how you connect to it; all it cares about is what methods are exposed from person.js when it requires it.
//person.js
const { Pool } = require('pg')
// do other database connection things here

const getPersonById = function (id) {
  // use your query here and return the results
}

module.exports = { getPersonById }
Now in your tests, you mock the person module, not the pg module. Imagine if you had 20-some-odd tests that all had the mock MySQL pool set up and you then changed to pg: you'd have to change all of those. Nightmare. By abstracting the database connection type/setup, testing becomes much easier, because now you just need to stub/mock your person.js module.
const { expect } = require('chai') // `expect` is used below but was missing from the original snippet
const sinon = require('sinon')
const person = require('../person.js') // or whatever relative file path it's in

describe('person.js', function () {
  it('is stubbed right now', function () {
    const personStub = sinon.stub(person)
    personStub.getPersonById.returns('yup')
    expect(personStub.getPersonById()).to.eq('yup')
  })
})
Below is a simpler approach that means the system-under-test doesn't need any special tricks.
It comprises two parts, though the first is a nice-to-have:
Use a DI framework to inject the pg.Pool. This is a better approach IMO anyway, and fits really well with testing.
In the beforeEach() of the tests, configure the DI framework to use a mock class with sinon.stub instances.
If you aren't using a DI framework, pass the mock as a Pool parameter... but DI is better ;)
The code below is TypeScript using tsyringe, but similar approaches will work fine with plain JavaScript etc.
Somewhere you'll have code that uses pg.Pool. A contrived example:
import { Pool } from 'pg'
...
function getPets(pool: Pool): Promise<Pet[]> {
  return pool.connect()
    .then(db => db.query(SQL_HERE)
      .then(result => {
        db.release()
        return result.rows // or result.rows.map(something) etc
      })
      .catch(error => {
        db.release()
        throw error
      })
    )
}
That works, and it's fine if you want to pass the Pool instance in. I'd prefer not to, so I use tsyringe like this:
import { container } from 'tsyringe'
...
function getPets(): Promise<Pet[]> {
  return container.resolve(Pool).connect()
    .then(...)
}
Exactly the same outcome, but getPets() is cleaner to call - it can be a pain to lug around a Pool instance.
The main part of the program would set up an instance in one of a few ways. Here's mine:
...
container.register(Pool, {
  useFactory: instanceCachingFactory(() => {
    return new Pool(/* any config here */)
  })
})
The beauty of this comes out in tests.
The code above (the "system under test") needs a Pool instance, and that instance needs a connect() method that resolves to an object with query() and release() methods.
This is what I used:
class MockPool {
  client = {
    query: sinon.stub(),
    release: sinon.stub()
  }

  connect () {
    return Promise.resolve(this.client)
  }
}
Here's the setup of a test using MockPool:
describe('proof', () => {
  let mockPool: MockPool

  beforeEach(() => {
    // Important! See:
    // https://github.com/microsoft/tsyringe#clearing-instances
    container.clearInstances()
    mockPool = new MockPool()
    container.registerInstance(Pool, mockPool as unknown as Pool)
  })
})
The cast through unknown to Pool is needed because I'm not implementing the whole Pool API, just what I need.
Here's what a test looks like:
it('mocks postgres', async () => {
  mockPool.client.query.resolves({
    rows: [
      { name: 'Woof', kind: 'Dog' },
      { name: 'Meow', kind: 'Cat' }
    ]
  })

  const r = await getPets()

  expect(r).to.deep.equal([
    { name: 'Woof', kind: 'Dog' },
    { name: 'Meow', kind: 'Cat' }
  ])
})
You can easily control what data the mock Postgres Pool returns, or throw errors, etc.
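For example, the failure path can be exercised the same way, assuming chai's expect is in scope as in the earlier tests; stub.rejects() is standard sinon, and the error message here is arbitrary:

it('propagates query errors', async () => {
  mockPool.client.query.rejects(new Error('connection lost'))

  // getPets() should release the client and rethrow the query error.
  let caught: Error | undefined
  try {
    await getPets()
  } catch (err) {
    caught = err as Error
  }
  expect(caught?.message).to.equal('connection lost')
})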

AWS CDK user pool authorizer

I'm trying to create an API gateway using the AWS-CDK and protect the REST endpoints with a Cognito user pool authorizer.
I cannot find any examples of how one would do this. I thought it should look something like this, but maybe the methods I need do not exist?
const cdk = require('@aws-cdk/cdk');
const lambda = require('@aws-cdk/aws-lambda');
const apigw = require('@aws-cdk/aws-apigateway');
const path = require('path');

//
// Define the stack:
class MyStack extends cdk.Stack {
  constructor (parent, id, props) {
    super(parent, id, props);

    var tmethodHandler = new lambda.Function(this, 'test-lambda', {
      runtime: lambda.Runtime.NodeJS810,
      handler: 'index.handler',
      code: lambda.Code.directory( path.join( __dirname, 'lambda')),
    });

    var api = new apigw.RestApi(this, 'test-api');
    const tmethod = api.root.addResource('testmethod');
    const tmethodIntegration = new apigw.LambdaIntegration(tmethodHandler);
    tmethod.addMethod('GET', tmethodIntegration, {
      authorizationType: apigw.AuthorizationType.Cognito,
      authorizerId : 'crap!!!?'
    });
  }
}

class MyApp extends cdk.App {
  constructor (argv) {
    super(argv);
    new MyStack(this, 'test-apigw');
  }
}

console.log(new MyApp(process.argv).run());
As of September 2019, @bgdnip's answer doesn't translate exactly for TypeScript. I got it working with the following:
const api = new RestApi(this, 'RestAPI', {
  restApiName: 'Rest-Name',
  description: 'API for journey services.',
});

const putIntegration = new LambdaIntegration(handler);

const auth = new CfnAuthorizer(this, 'APIGatewayAuthorizer', {
  name: 'customer-authorizer',
  identitySource: 'method.request.header.Authorization',
  providerArns: [providerArn.valueAsString],
  restApiId: api.restApiId,
  type: AuthorizationType.COGNITO,
});

const post = api.root.addMethod('PUT', putIntegration, { authorizationType: AuthorizationType.COGNITO });
const postMethod = post.node.defaultChild as CfnMethod;
postMethod.addOverride('Properties.AuthorizerId', { Ref: auth.logicalId });
This is from https://docs.aws.amazon.com/cdk/latest/guide/cfn_layer.html#cfn_layer_resource_props
UPDATE October
The above is already out of date and unnecessary; as of aws-cdk 1.12.0 the same can be achieved with the following:
const api = new RestApi(this, 'RestAPI', {
  restApiName: 'Rest-Name',
  description: 'API for journey services.',
});

const putIntegration = new LambdaIntegration(handler);

const auth = new CfnAuthorizer(this, 'APIGatewayAuthorizer', {
  name: 'customer-authorizer',
  identitySource: 'method.request.header.Authorization',
  providerArns: [providerArn.valueAsString],
  restApiId: api.restApiId,
  type: AuthorizationType.COGNITO,
});

const post = api.root.addMethod('PUT', putIntegration, {
  authorizationType: AuthorizationType.COGNITO,
  authorizer: { authorizerId: auth.ref }
});
The previous answers no longer work because the authorizerId property was replaced with authorizer, which isn't fully implemented at this time.
Instead, it can be done by using the underlying CfnResource objects, as described in the official guide.
Here's Python code as an example:
from aws_cdk import cdk
from aws_cdk import aws_apigateway

class Stk(cdk.Stack):
    def __init__(self, app, id):
        super().__init__(app, id)
        api_gw = aws_apigateway.RestApi(self, 'MyApp')
        post_method = api_gw.root.add_method(http_method='POST')
        # Create authorizer using low level CfnResource
        api_gw_authorizer = aws_apigateway.CfnAuthorizer(
            scope=self,
            id='my_authorizer',
            rest_api_id=api_gw.rest_api_id,
            name='MyAuth',
            type='COGNITO_USER_POOLS',
            identity_source='method.request.header.Authorization',
            provider_arns=[
                'arn:aws:cognito-idp:eu-west-1:123456789012:userpool/'
                'eu-west-1_MyCognito'])
        # Get underlying post_method Resource object. Returns CfnMethod
        post_method_resource = post_method.node.find_child('Resource')
        # Add properties to low level resource
        post_method_resource.add_property_override('AuthorizationType',
                                                   'COGNITO_USER_POOLS')
        # AuthorizerId uses Ref; simulate it with a dictionary
        post_method_resource.add_property_override(
            'AuthorizerId',
            {"Ref": api_gw_authorizer.logical_id})

app = cdk.App()
stk = Stk(app, "myStack")
app.synth()
This is my solution in TypeScript (based somewhat on bgdnlp's response)
import { App, Stack, Aws } from '@aws-cdk/core';
import { Code, Function, Runtime } from '@aws-cdk/aws-lambda';
import { LambdaIntegration, RestApi, CfnAuthorizer, CfnMethod } from '@aws-cdk/aws-apigateway';

const app = new App();
const stack = new Stack(app, `mystack`);
const api = new RestApi(stack, `myapi`);

const region = Aws.REGION;
const account = Aws.ACCOUNT_ID;
const cognitoArn = `arn:aws:cognito-idp:${region}:${account}:userpool/${USER_POOL_ID}`;

const authorizer = new CfnAuthorizer(stack, 'Authorizer', {
  name: `myauthorizer`,
  restApiId: api.restApiId,
  type: 'COGNITO_USER_POOLS',
  identitySource: 'method.request.header.Authorization',
  providerArns: [cognitoArn],
});

const lambda = new Function(stack, 'mylambda', {
  runtime: Runtime.NODEJS_10_X,
  code: Code.asset('dist'),
  handler: `index.handler`,
});

const integration = new LambdaIntegration(lambda);
const res = api.root.addResource('hello');
const method = res.addMethod('GET', integration);

const child = method.node.findChild('Resource') as CfnMethod;
child.addPropertyOverride('AuthorizationType', 'COGNITO_USER_POOLS');
child.addPropertyOverride('AuthorizerId', { Ref: authorizer.logicalId });
Indeed, there is no example to do this via copy and paste ;). Here is my example of creating an AWS Cognito user pool and connecting a user pool authorizer with API Gateway and a lambda function, using the AWS CDK for Java, version 0.24.1.
This is just an example to provide a protected API for a function called "Foo". It uses:
Cognito User Pool
API Gateway
Lambda
DynamoDB
// -----------------------------------------------------------------------
// Cognito User Pool
// -----------------------------------------------------------------------
CfnUserPool userPool = new CfnUserPool(this, "cognito",
    CfnUserPoolProps.builder()
        .withAdminCreateUserConfig(
            AdminCreateUserConfigProperty.builder()
                .withAllowAdminCreateUserOnly(false)
                .build())
        .withPolicies(
            PoliciesProperty.builder()
                .withPasswordPolicy(
                    PasswordPolicyProperty.builder()
                        .withMinimumLength(6)
                        .withRequireLowercase(false)
                        .withRequireNumbers(false)
                        .withRequireSymbols(false)
                        .withRequireUppercase(false)
                        .build()
                )
                .build()
        )
        .withAutoVerifiedAttributes(Arrays.asList("email"))
        .withSchema(Arrays.asList(
            CfnUserPool.SchemaAttributeProperty.builder()
                .withAttributeDataType("String")
                .withName("email")
                .withRequired(true)
                .build()))
        .build());

// -----------------------------------------------------------------------
// Cognito User Pool Client
// -----------------------------------------------------------------------
new CfnUserPoolClient(this, "cognitoClient",
    CfnUserPoolClientProps.builder()
        .withClientName("UserPool")
        .withExplicitAuthFlows(Arrays.asList("ADMIN_NO_SRP_AUTH"))
        .withRefreshTokenValidity(90)
        .withUserPoolId(userPool.getRef())
        .build());

// -----------------------------------------------------------------------
// Lambda function
// -----------------------------------------------------------------------
Function function = new Function(this, "function.foo",
    FunctionProps.builder()
        // lambda code located in /functions/foo
        .withCode(Code.asset("functions/foo"))
        .withHandler("index.handler")
        .withRuntime(Runtime.NODE_J_S810)
        .build());

// -----------------------------------------------------------------------
// DynamoDB Table
// -----------------------------------------------------------------------
Table table = new Table(this, "dynamodb.foo", TableProps.builder()
    .withTableName("foo")
    .withPartitionKey(Attribute.builder()
        .withName("id")
        .withType(AttributeType.String)
        .build())
    .build());

// GRANTS function -> table
table.grantReadWriteData(function.getRole());

// -----------------------------------------------------------------------
// API Gateway
// -----------------------------------------------------------------------
// API Gateway REST API with lambda integration
LambdaIntegration lambdaIntegration = new LambdaIntegration(function);
RestApi restApi = new RestApi(this, "foo");

// Authorizer configured with cognito user pool
CfnAuthorizer authorizer = new CfnAuthorizer(this, "authorizer",
    CfnAuthorizerProps.builder()
        .withName("cognitoAuthorizer")
        .withRestApiId(restApi.getRestApiId())
        .withIdentitySource("method.request.header.Authorization")
        .withProviderArns(Arrays.asList(userPool.getUserPoolArn()))
        .withType("COGNITO_USER_POOLS")
        .build());

// Bind authorizer to API resource
restApi.getRoot().addMethod("ANY", lambdaIntegration, MethodOptions
    .builder()
    .withAuthorizationType(AuthorizationType.Cognito)
    .withAuthorizerId(authorizer.getAuthorizerId())
    .build());
I figured out what looks like a mechanism... I was able to get it to work like this:
var auth = new apigw.cloudformation.AuthorizerResource(this, 'myAuthorizer', {
  restApiId: api.restApiId,
  authorizerName: 'mypoolauth',
  authorizerResultTtlInSeconds: 300,
  identitySource: 'method.request.header.Authorization',
  providerArns: [ 'arn:aws:cognito-idp:us-west-2:redacted:userpool/redacted' ],
  type: "COGNITO_USER_POOLS"
});

tmethod.addMethod('GET', tmethodIntegration, {
  authorizationType: apigw.AuthorizationType.Cognito,
  authorizerId : auth.authorizerId
});
Now to figure out how to enable CORS headers on API Gateway...
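As an aside for anyone reading later: newer releases of @aws-cdk/aws-apigateway have built-in CORS support, so a hand-rolled OPTIONS method should not be necessary anymore. A minimal sketch, assuming a reasonably recent CDK version:

// Per-resource preflight; RestApi also accepts defaultCorsPreflightOptions
// at construction time (see the last answer below).
tmethod.addCorsPreflight({
  allowOrigins: apigw.Cors.ALL_ORIGINS,
  allowMethods: apigw.Cors.ALL_METHODS,
});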
You have to:
create the api gateway
set Cognito as authorizer in the api gateway
set the authorization in your method
set your integration with the lambda to 'Use Lambda Proxy integration' (the LambdaIntegration construct sets this to true by default, so don't worry about it)
Finally, make a request adding the token in the Authorization header. The API gateway will validate it with Cognito. If this passes, your lambda will be triggered, and in the event you can find the claims under event.requestContext.authorizer.claims.
const lambda = require("@aws-cdk/aws-lambda");
const apiGateway = require('@aws-cdk/aws-apigateway');

const api = new apiGateway.RestApi(
  this,
  '<id-ApiGateway>',
  {
    restApiName: '<ApiGateway-name>',
  },
);

const auth = new apiGateway.CfnAuthorizer(this, '<id>', {
  name: "<authorizer-name>",
  type: apiGateway.AuthorizationType.COGNITO,
  authorizerResultTtlInSeconds: 300,
  identitySource: "method.request.header.Authorization",
  restApiId: api.restApiId,
  providerArns: ['<userPool.userPoolArn>'],
});

const myLambda = new lambda.Function(this, "<id>", {
  functionName: '<lambda-name>',
  runtime: lambda.Runtime.NODEJS_10_X,
  handler: "<your-handler>",
  code: lambda.Code.fromAsset("<path>"), // TODO: modify the way to get the path
});

const lambdaIntegration = new apiGateway.LambdaIntegration(myLambda);
const resource = api.root.resourceForPath('<your-api-path>');
// When the API is deployed, the URL will look like this:
// https://xxxxxx.execute-api.us-east-2.amazonaws.com/dev/<your-api-path>

const authorizationOptions = {
  apiKeyRequired: false,
  authorizer: { authorizerId: auth.ref },
  authorizationType: 'COGNITO_USER_POOLS'
};

resource.addMethod(
  'GET', // your method
  lambdaIntegration,
  authorizationOptions
);
For the weirdos using the Java version of the CDK (like me), you can utilize the setters on the Cfn constructs:
final UserPool userPool = ...
final RestApi restApi = ...
final LambdaIntegration integration = ...

final Method method = restApi.getRoot().addMethod("GET", integration);

final CfnAuthorizer cognitoAuthorizer = new CfnAuthorizer(this, "CfnCognitoAuthorizer",
    CfnAuthorizerProps.builder()
        .name("CognitoAuthorizer")
        .restApiId(restApi.getRestApiId())
        .type("COGNITO_USER_POOLS")
        .providerArns(Arrays.asList(userPool.getUserPoolArn()))
        .identitySource("method.request.header.Authorization")
        .build());

final CfnMethod cfnMethod = (CfnMethod) method.getNode().getDefaultChild();
cfnMethod.setAuthorizationType("COGNITO_USER_POOLS");
cfnMethod.setAuthorizerId(cognitoAuthorizer.getRef());
As of v1.88 this is now supported directly in CDK
const userPool = new cognito.UserPool(this, 'UserPool');

const auth = new apigateway.CognitoUserPoolsAuthorizer(this, 'booksAuthorizer', {
  cognitoUserPools: [userPool]
});

declare const books: apigateway.Resource;
books.addMethod('GET', new apigateway.HttpIntegration('http://amazon.com'), {
  authorizer: auth,
  authorizationType: apigateway.AuthorizationType.COGNITO,
});
AWS CDK API V1 Reference
AWS CDK API V2 Reference
If you are currently using CDK v2 or CDK v1(latest). This has been made easy:
// Your cognito userpool
const userPool = new cognito.UserPool(this, "MyUserPool");

// Authorizer for your userpool
const cognitoAuthorizer = new apigw.CognitoUserPoolsAuthorizer(
  this,
  "CognitoAuthorierOnLambda",
  {
    cognitoUserPools: [userPool],
  }
);

// Rest Api
const api = new apigw.RestApi(this, 'myApi', {
  defaultCorsPreflightOptions: {
    allowOrigins: apigw.Cors.ALL_ORIGINS,
    allowMethods: apigw.Cors.ALL_METHODS, // this is also the default
  },
  deploy: true
});

// add /getItems endpoint
const getItems = api.root.addResource('getItems');

// attach lambda to this endpoint
const getItemsIntegration = new apigw.LambdaIntegration(getItemsLambdaFunction);

// make the endpoint secure with cognito userpool
getItems.addMethod('GET', getItemsIntegration, {
  authorizer: cognitoAuthorizer
});
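Once deployed, requests to the protected endpoint must carry a Cognito ID token in the Authorization header, which is what the authorizer inspects by default. A hypothetical call, with the URL and token left as placeholders:

// idToken would come from a Cognito sign-in (e.g. amazon-cognito-identity-js).
const res = await fetch(
  'https://<api-id>.execute-api.<region>.amazonaws.com/prod/getItems',
  { headers: { Authorization: idToken } }
);
// Without a valid token, API Gateway answers 401 before the lambda runs.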

Lambda function change endpoint

I am somewhat new to Lambda and am trying to pull some data from Support (us-east-1) and then read/write to a DynamoDB table (I am using a local dynamodb-local instance), but I don't know how to change the region.
const AWS = require('aws-sdk');

AWS.config.update({
  region: 'us-east-1',
});

const support = new AWS.Support({
  region: 'us-east-1',
  apiVersion: '2013-04-15'
});

const supportParams = {
  checkId: 'Qch7DwouX1',
  language: 'en'
};

let stuff = {};
support.describeTrustedAdvisorCheckResult(supportParams, (err, data) => {
  if (err) console.log('Error: ', err.stack);
  else {
    stuff[test] = [...data];
  }
});

// Now I want to pull some data from DynamoDB locally or in another region
//
// AWS.config.update({endpoint: 'http://localhost:8000'});
//
How do I change the endpoint to http://localhost:8000 or us-west-2 to get something from DynamoDB? Am I not supposed to change the region/endpoint within one lambda function?
I was trying something like:
const dynaDB = new AWS.DynamoDB({ endpoint: 'http://localhost:8000' });
const dynaClient = new AWS.DynamoDB.DocumentClient();
dynaClient.scan({}, (err, data) => {
  ..
  ..
  ..
});
We had the same problem when we wanted to copy between two regions.
You can instantiate the aws-sdk once for each DynamoDB endpoint:
const AWSregion = require('aws-sdk');
AWSregion.config.update({
  region: 'us-east-1',
});
// Connect to us-east-1 with AWSregion

const AWSlocal = require('aws-sdk'); // Don't set any region here, since it is local
// Connect to local dynamodb with AWSlocal
Hope it helps.
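One caveat: require() caches modules, so both require('aws-sdk') calls above return the same object, and a later config.update() affects every client created from it. A safer sketch, relying on aws-sdk v2 accepting per-client configuration (the region/endpoint values here are just examples):

const AWS = require('aws-sdk');

// Each client carries its own region/endpoint; the global config is untouched.
const remote = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });
const local = new AWS.DynamoDB.DocumentClient({
  region: 'us-west-2',
  endpoint: 'http://localhost:8000',
});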
