Strapi backend broken when enabling custom middleware - javascript

I created a custom middleware that makes the API & admin unreachable when I enable it.
The middleware is pretty simple: it adds a request id to each incoming request on the server:
const { createNamespace } = require('cls-hooked');
const { v4: uuidv4 } = require('uuid');

const loggerNamespace = createNamespace('logger');

module.exports = (strapi) => {
  return {
    initialize() {
      strapi.app.use((ctx, next) => {
        const reqId = ctx.request.get('X-Request-Id') || uuidv4();
        ctx.response.set('X-Request-Id', reqId);
        loggerNamespace.run(() => {
          loggerNamespace.set('requestId', reqId);
          next();
        });
      });
    },
  };
};
It's enabled using the config file ./config/middleware.js:
module.exports = {
  settings: {
    addRequestId: {
      enabled: true,
    },
  },
};
Then, when it's enabled, calling an API endpoint or trying to reach the admin results in 404 Not Found.
I use Strapi 3.6.8 with Node 14.18.1.
Any idea why?
PS: I suspected cls-hooked to be the culprit, but removing it and testing with an anemic middleware doesn't help either.

@Salvino put me on the right track by suggesting that I should await the execution of next.
Digging into the code of cls-hooked, I found the method runPromise, which is similar to run but returns a Promise that can be awaited. It solved the issue.
Fixed code:
const { createNamespace } = require('cls-hooked');
const { v4: uuidv4 } = require('uuid');

const loggerNamespace = createNamespace('logger');

module.exports = (strapi) => {
  return {
    initialize() {
      strapi.app.use(async (ctx, next) => {
        const reqId = ctx.request.get('X-Request-Id') || uuidv4();
        ctx.response.set('X-Request-Id', reqId);
        await loggerNamespace.runPromise(async () => {
          loggerNamespace.set('requestId', reqId);
          await next();
        });
      });
    },
  };
};
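With this in place, any code that runs while handling the request can read the id back from the namespace. For example, a (hypothetical) logging helper could prefix every line with it:

const { getNamespace } = require('cls-hooked');

// Hypothetical helper: cls-hooked's get() returns undefined outside a request context.
function logWithRequestId(message) {
  const reqId = getNamespace('logger').get('requestId');
  console.log(reqId ? `[${reqId}] ${message}` : message);
}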

Related

Access token missing from Keycloak context

I am trying to make an authenticated request from Postman to my Node, Apollo, Express backend. I am getting an error saying that the user is unauthenticated. When I look at the context object, there is no access token, and calling context.kauth.isAuthenticated() returns false.
Looking at the access token, I can see that accessToken is indeed blank, but the Bearer token does exist in the request header.
So I am not sure why the access token is not being included.
I am making the request from Postman, including the token in the request as a Bearer token.
In order to get this access token, I first make a Postman request to Keycloak to generate the token (note that I am intentionally not showing my username and password for this post).
I am then using that access token in my Postman request above.
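In raw HTTP terms, the token request I'm sending looks roughly like this (realm, client, and credentials are placeholders; older Keycloak servers include the /auth prefix, newer ones drop it):

POST /auth/realms/<my-realm>/protocol/openid-connect/token HTTP/1.1
Host: <keycloak-host>:8080
Content-Type: application/x-www-form-urlencoded

grant_type=password&client_id=<my-client>&username=<user>&password=<pass>

The access_token field of the JSON response is what goes into Postman's Bearer Token field.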
This is what my index.js file looks like:
require("dotenv").config();
import { ApolloServer } from "apollo-server-express";
import { ApolloServerPluginDrainHttpServer } from "apollo-server-core";
const { makeExecutableSchema } = require('#graphql-tools/schema');
import { configureKeycloak } from "./auth/config"
import {
KeycloakContext,
KeycloakTypeDefs,
KeycloakSchemaDirectives,
} from "keycloak-connect-graphql";
import { applyDirectiveTransformers } from "./auth/transformers";
import express from "express";
import http from "http";
import typeDefs from "./graphql/typeDefs";
import resolvers from "./graphql/resolvers";
import { MongoClient } from "mongodb";
import MongoHelpers from "./dataSources/MongoHelpers";
async function startApolloServer(typeDefs, resolvers) {
const client = new MongoClient(process.env.MONGO_URI);
client.connect();
let schema = makeExecutableSchema({
typeDefs: [KeycloakTypeDefs, typeDefs],
resolvers
});
schema = applyDirectiveTransformers(schema);
const app = express();
const httpServer = http.createServer(app);
const { keycloak } = configureKeycloak(app, '/graphql')
const server = new ApolloServer({
schema,
schemaDirectives: KeycloakSchemaDirectives,
resolvers,
context: ({ req }) => {
return {
kauth: new KeycloakContext({ req }, keycloak)
}
},
plugins: [ApolloServerPluginDrainHttpServer({ httpServer })],
});
await server.start();
server.applyMiddleware({ app });
await new Promise((resolve) => httpServer.listen({ port: 4000 }, resolve));
console.log(`🚀 Server ready at http://localhost:4000${server.graphqlPath}`);
}
startApolloServer(typeDefs, resolvers);
And this is my keycloak.json file:
I am really quite stumped; my initial thought is that I am not making the request from Postman correctly. I am grateful for any guidance.
Requirements:
use Node, Apollo, and Express to get Keycloak authentication and authorization based on the keycloak-connect middleware
use Postman to make an authenticated call with a Bearer token.
index.js in the question is not a minimal, reproducible example because, for example, the parts in typeDefs, ./auth/transformers and so on are missing.
There is a cool description at https://github.com/aerogear/keycloak-connect-graphql with nice example code.
So if one changes your approach only slightly (e.g. mongodb is not needed) and adds the slightly adapted code from the description on the GitHub page, one can get a standalone, running index.js.
For example, it might look something like this:
"use strict";
const {ApolloServer, gql} = require("apollo-server-express")
const {ApolloServerPluginDrainHttpServer} = require("apollo-server-core")
const {makeExecutableSchema} = require('#graphql-tools/schema');
const {getDirective, MapperKind, mapSchema} = require('#graphql-tools/utils')
const {KeycloakContext, KeycloakTypeDefs, auth, hasRole, hasPermission} = require("keycloak-connect-graphql")
const {defaultFieldResolver} = require("graphql");
const express = require("express")
const http = require("http")
const fs = require('fs');
const path = require('path');
const session = require('express-session');
const Keycloak = require('keycloak-connect');
function configureKeycloak(app, graphqlPath) {
const keycloakConfig = JSON.parse(fs.readFileSync(path.resolve(__dirname, 'config/keycloak.json')));
const memoryStore = new session.MemoryStore();
app.use(session({
secret: process.env.SESSION_SECRET_STRING || 'this should be a long secret',
resave: false,
saveUninitialized: true,
store: memoryStore
}));
const keycloak = new Keycloak({
store: memoryStore
}, keycloakConfig);
// Install general keycloak middleware
app.use(keycloak.middleware({
admin: graphqlPath
}));
// Protect the main route for all graphql services
// Disable unauthenticated access
app.use(graphqlPath, keycloak.middleware());
return {keycloak};
}
const authDirectiveTransformer = (schema, directiveName = 'auth') => {
return mapSchema(schema, {
[MapperKind.OBJECT_FIELD]: (fieldConfig) => {
const authDirective = getDirective(schema, fieldConfig, directiveName)?.[0];
if (authDirective) {
const {resolve = defaultFieldResolver} = fieldConfig;
fieldConfig.resolve = auth(resolve);
}
return fieldConfig;
}
})
}
const directive = (keys, key, directive, directiveName) => {
if (keys.length === 1 && keys[0] === key) {
let dirs = directive[keys[0]];
if (typeof dirs === 'string') dirs = [dirs];
if (Array.isArray(dirs)) {
return dirs.map((val) => String(val));
} else {
throw new Error(`invalid ${directiveName} args. ${key} must be a String or an Array of Strings`);
}
} else {
throw Error(`invalid ${directiveName} args. must contain only a ${key} argument`);
}
}
const permissionDirectiveTransformer = (schema, directiveName = 'hasPermission') => {
return mapSchema(schema, {
[MapperKind.OBJECT_FIELD]: (fieldConfig) => {
const permissionDirective = getDirective(schema, fieldConfig, directiveName)?.[0];
if (permissionDirective) {
const {resolve = defaultFieldResolver} = fieldConfig;
const keys = Object.keys(permissionDirective);
let resources = directive(keys, 'resources', permissionDirective, directiveName);
fieldConfig.resolve = hasPermission(resources)(resolve);
}
return fieldConfig;
}
})
}
const roleDirectiveTransformer = (schema, directiveName = 'hasRole') => {
return mapSchema(schema, {
[MapperKind.OBJECT_FIELD]: (fieldConfig) => {
const roleDirective = getDirective(schema, fieldConfig, directiveName)?.[0];
if (roleDirective) {
const {resolve = defaultFieldResolver} = fieldConfig;
const keys = Object.keys(roleDirective);
let role = directive(keys, 'role', roleDirective, directiveName);
fieldConfig.resolve = hasRole(role)(resolve);
}
return fieldConfig;
}
})
}
const applyDirectiveTransformers = (schema) => {
return authDirectiveTransformer(roleDirectiveTransformer(permissionDirectiveTransformer(schema)));
}
const typeDefs = gql`
type Query {
hello: String #hasRole(role: "developer")
}
`
const resolvers = {
Query: {
hello: (obj, args, context, info) => {
console.log(context.kauth)
console.log(context.kauth.isAuthenticated())
console.log(context.kauth.accessToken.content.preferred_username)
const name = context.kauth.accessToken.content.preferred_username || 'world'
return `Hello ${name}`
}
}
}
async function startApolloServer(typeDefs, resolvers) {
let schema = makeExecutableSchema({
typeDefs: [KeycloakTypeDefs, typeDefs],
resolvers
});
schema = applyDirectiveTransformers(schema);
const app = express();
const httpServer = http.createServer(app);
const {keycloak} = configureKeycloak(app, '/graphql')
const server = new ApolloServer({
schema,
resolvers,
context: ({req}) => {
return {
kauth: new KeycloakContext({req}, keycloak)
}
},
plugins: [ApolloServerPluginDrainHttpServer({httpServer})],
});
await server.start();
server.applyMiddleware({app});
await new Promise((resolve) => httpServer.listen({port: 4000}));
console.log(`🚀 Server ready at http://localhost:4000${server.graphqlPath}`);
}
startApolloServer(typeDefs, resolvers);
The corresponding package.json:
{
  "dependencies": {
    "@graphql-tools/schema": "^8.3.10",
    "@graphql-tools/utils": "^8.6.9",
    "apollo-server-core": "^3.6.7",
    "apollo-server-express": "^3.6.7",
    "express": "^4.17.3",
    "express-session": "^1.17.2",
    "graphql": "^15.8.0",
    "graphql-tools": "^8.2.8",
    "http": "^0.0.1-security",
    "keycloak-connect": "^18.0.0",
    "keycloak-connect-graphql": "^0.7.0"
  }
}
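For completeness, the config/keycloak.json that configureKeycloak reads follows the standard keycloak-connect format; a placeholder sketch (all values are stand-ins for your own setup):

{
  "realm": "<my-realm>",
  "auth-server-url": "http://localhost:8080/auth/",
  "ssl-required": "external",
  "resource": "<my-client>",
  "bearer-only": true,
  "credentials": {
    "secret": "<client-secret>"
  },
  "confidential-port": 0
}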
Call With Postman
As one can see, the authenticated call is then successful. Also, with the above code, the accessToken is logged correctly to the debug console.
This is certainly not functionality that exactly meets your requirements, but you may be able to work gradually from this running example toward the desired/necessary changes.

Asynchronous code works independently, but not together. Not getting any exceptions, just timing out

I am writing a lambda function to add hosts to an SQS queue for a rolling restart. The code I have written works individually, but not together, even when I hard-code values in the constructor. This doesn't appear to be a memory/CPU issue: I tried running the function with 1GB of memory, even though it only uses about 80MB. The average execution time for the individual functions is about 0.5 seconds (it shouldn't take more than about 1.5 seconds to execute in total). I tried running this function with a 30-second timeout, but it still timed out.
I work behind a corporate proxy and have to hand-jam the code; I don't have an IDE or IntelliSense on my internet-facing network. There may be typos here, but not in the actual code. I have omitted my module imports and variable declarations to save time; they aren't relevant to the issue at hand.
EDIT: I added the module imports and variable declarations to the first example to hopefully alleviate some confusion.
Here are just a few things I have tried. This does not work (timing out):
// Custom lambda layer
const { marklogic, aws } = require('nodejs-layer-lib');

const { HOSTS, DOMAIN, PORT, USERNAME, PASSWORD, RESTART_QUEUE_NAME } = process.env;

const params = [
  'format=json'
];

const options = {
  port: PORT,
  params: params,
  httpOptions: {
    headers: {
      'Authorization': `Basic ${Buffer.from(`${USERNAME}:${PASSWORD}`).toString('base64')}`
    },
    method: 'GET'
  }
};

const taskServers = (HOSTS.split(',') || []).map(host => {
  const _host = host.split(':');
  return {
    id: _host[0],
    name: `http://${_host[1].toLowerCase()}.${DOMAIN}`
  };
});

exports.handler = async () => {
  let hosts, queueUrl, addToQueueResults;
  try {
    hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
  } catch (e) { console.error('hosts', e); }
  try {
    queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
  } catch (e) { console.error('queueUrl ', e); }
  try {
    addToQueueResults = await aws.sqs.addMessages(queueURL, hosts);
  } catch (e) { console.error('addToQueueResults ', e); }
  return {
    status: 200,
    body: addToQueueResults
  };
}
This does not work (timing out):
// Modules imports and variable declarations here...
exports.handler = async () => {
  const hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
  const queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
  const addToQueueResults = await aws.sqs.addMessages(queueURL, hosts);
  return {
    status: 200,
    body: addToQueueResults
  };
}
This does not work (timing out):
// Modules imports and variable declarations here...
exports.handler = async () => {
  const hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
  const queueUrl = await aws.sqs.getQueueUrlByName('my-queue-name');
  const addToQueueResults = await aws.sqs.addMessages('http://queueurl.com', ['anything', 'in', 'here']); // Doesn't even need the queueUrl or hosts anymore
  return {
    status: 200,
    body: addToQueueResults
  };
}
This works. It will return the host objects I am expecting in the response:
// Modules imports and variable declarations here...
exports.handler = async () => {
  const hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
  return {
    status: 200,
    body: hosts
  };
}
This works. It will get the queue url, then add messages to my SQS queue and return the SQS response:
// Modules imports and variable declarations here...
exports.handler = async () => {
  const queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
  const addToQueueResults = await aws.sqs.addMessages(queueUrl, ['anything', 'in', 'here']);
  return {
    status: 200,
    body: addToQueueResults
  };
}
I tried implementing the async handler pattern from "AWS Lambda function handler in Node.js" and reviewed many AWS Lambda execution troubleshooting documents. The MarkLogic management API runs on port 8002 by default, and I think the aws-sdk module uses http/https (80/443), so I don't think the ports are getting tied up.
What am I missing here?
EDIT 2: This has something to do with how promises are handled by AWS Lambda; I cannot find much information about this. Even following the instructions for async handlers in "AWS Lambda function handler in Node.js", I cannot get this to work, although it works perfectly fine locally, with or without my custom lambda layer.
Node.js runtime: 12.x (I didn't mention this before)
This also doesn't work (timing out):
// Modules imports and variable declarations here...
exports.handler = async function (event) {
  const promise = function () {
    return new Promise(async function (resolve, reject) {
      try {
        const hosts = await marklogic.hosts.getHosts(taskServers, options) || [];
        const queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
        const addToQueueResults = await aws.sqs.addMessages(queueUrl, hosts);
        resolve({
          status: 200,
          body: addToQueueResults
        });
      } catch (error) {
        reject({
          status: 500,
          error: error
        });
      }
    });
  };
  return promise(); // Throws error without constructor despite the AWS doc example
}
Unless some AWS Lambda genius has run into a similar issue before with Node.js, I am just going to convert it into two lambda functions and use Step Functions to process them.
There was a typo in queueUrl (I imagine it's not that, but worth a try!).
Please run:
// Custom lambda layer
const { marklogic, aws } = require('nodejs-layer-lib');

const { HOSTS, DOMAIN, PORT, USERNAME, PASSWORD, RESTART_QUEUE_NAME } = process.env;

const params = [
  'format=json'
];

const options = {
  port: PORT,
  params,
  httpOptions: {
    headers: {
      Authorization: `Basic ${Buffer.from(`${USERNAME}:${PASSWORD}`).toString('base64')}`
    },
    method: 'GET'
  }
};

const taskServers = (HOSTS.split(',') || []).map(host => {
  const _host = host.split(':');
  return {
    id: _host[0],
    name: `http://${_host[1].toLowerCase()}.${DOMAIN}`
  };
});

exports.handler = async () => {
  let hosts, queueUrl, addToQueueResults;
  try {
    hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
  } catch (e) { console.error('hosts', e); }
  try {
    queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
  } catch (e) { console.error('queueUrl ', e); }
  try {
    addToQueueResults = await aws.sqs.addMessages(queueUrl, hosts);
  } catch (e) { console.error('addToQueueResults ', e); }
  return {
    status: 200,
    body: JSON.stringify(addToQueueResults)
  };
};
// keeping the same format.. ^^
If no luck, here's what is on my mind: since aws-sdk is included out of the box in Lambda, it's not customary to require it extraneously via a layer. Although it may not appear to be imported at the top level by marklogic, it may be bundled deep within marklogic; then, when you import AWS and change its config (in the layer), it overwrites yours.
Let's find out:
Step 1:
So, this you say should work if we ignore the AWS import and just import marklogic?
// Custom lambda layer
// const { marklogic, aws } = require('nodejs-layer-lib'); // ignoring AWS for now
const { marklogic } = require('nodejs-layer-lib');

const { HOSTS, DOMAIN, PORT, USERNAME, PASSWORD, RESTART_QUEUE_NAME } = process.env;

const params = [
  'format=json'
];

const options = {
  port: PORT,
  params,
  httpOptions: {
    headers: {
      Authorization: `Basic ${Buffer.from(`${USERNAME}:${PASSWORD}`).toString('base64')}`
    },
    method: 'GET'
  }
};

const taskServers = (HOSTS.split(',') || []).map(host => {
  const _host = host.split(':');
  return {
    id: _host[0],
    name: `http://${_host[1].toLowerCase()}.${DOMAIN}`
  };
});

exports.handler = async () => {
  // let hosts, queueUrl, addToQueueResults;
  let hosts;
  try {
    hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
    console.log('hosts => ', hosts);
    // queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
    // addToQueueResults = await aws.sqs.addMessages(queueUrl, hosts);
    return {
      status: 200,
      body: JSON.stringify(hosts)
    };
  } catch (error) {
    console.log('error => ', error);
    throw error;
  }
};
OK, so if that works:
Step 2 (please set the region for SQS and also hard-code the queueUrl):
// Custom lambda layer
// const { marklogic, aws } = require('nodejs-layer-lib');
const { marklogic } = require('nodejs-layer-lib');

const AWS = require('aws-sdk');
AWS.config.update({ region: 'eu-west-1' }); // Please set region accordingly
const sqs = new AWS.SQS({ apiVersion: '2012-11-05' });

const { HOSTS, DOMAIN, PORT, USERNAME, PASSWORD, RESTART_QUEUE_NAME } = process.env;

const params = [
  'format=json'
];

const options = {
  port: PORT,
  params,
  httpOptions: {
    headers: {
      Authorization: `Basic ${Buffer.from(`${USERNAME}:${PASSWORD}`).toString('base64')}`
    },
    method: 'GET'
  }
};

const taskServers = (HOSTS.split(',') || []).map(host => {
  const _host = host.split(':');
  return {
    id: _host[0],
    name: `http://${_host[1].toLowerCase()}.${DOMAIN}`
  };
});

exports.handler = async () => {
  let hosts, addToQueueResults;
  try {
    hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
    console.log('hosts => ', hosts);
    const queueUrl = 'Please hard code the queueUrl for now';
    const sqsParams = {
      MessageBody: JSON.stringify(hosts), // SQS message bodies must be strings
      QueueUrl: queueUrl
    };
    addToQueueResults = await sqs.sendMessage(sqsParams).promise();
    console.log('addToQueueResults => ', addToQueueResults);
    return {
      status: 200,
      body: JSON.stringify(addToQueueResults)
    };
  } catch (error) {
    console.log('error => ', error);
    throw error;
  }
};
IF that doesn't work, then Step 3: move the require of marklogic to below the require of AWS, setting the region as in this last example (so any deeply nested marklogic AWS logic we're unaware of now overwrites your AWS require). Re-run it... fingers crossed :-)
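To make the reordering concrete, a minimal sketch (everything else from Step 2 stays the same):

// Step 3 sketch: configure aws-sdk BEFORE the layer is required, so any
// AWS setup buried inside marklogic runs after ours and we can see
// whether it is the thing clobbering the config.
const AWS = require('aws-sdk');
AWS.config.update({ region: 'eu-west-1' }); // set your region
const sqs = new AWS.SQS({ apiVersion: '2012-11-05' });

const { marklogic } = require('nodejs-layer-lib'); // layer required last
// ...rest identical to Step 2...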

Async export of redis client in nodejs

The following code constructs a redis client and exports it. I am fetching the redis password from the Vault secret management service, and that call is a promise/async. The code doesn't wait for that call, so it exports the redis client before the async call completes. I am not sure what I am doing wrong here. Any idea?
import redis from 'redis';
import bluebird from 'bluebird';
import logger from '../logger';
import srvconf from '../srvconf';
import { getVaultSecret } from '../services/vault.service';

const vaultConfig = srvconf.get('vault');
bluebird.promisifyAll(redis);

let redisUrl = '';

const maskRedisUrl = (url) => url.replace(/password=.*/, 'password=*****');

const setRedisUrl = (host, port, pw) => {
  const pwstring = pw ? `?password=${pw}` : '';
  const url = `redis://${host}:${port}${pwstring}`;
  console.log(`Setting redis_url to '${maskRedisUrl(url)}'`);
  return url;
}

if (vaultConfig.use_vault) {
  (async () => {
    const secret = await getVaultSecret(`${vaultConfig.redis.secrets_path + vaultConfig.redis.key}`)
    redisUrl = setRedisUrl(srvconf.get('redis_host'), srvconf.get('redis_port'), secret.PASSWORD);
  })().catch(err => console.log(err));
} else {
  if (!srvconf.get('redis_url')) {
    redisUrl = setRedisUrl(srvconf.get('redis_host'), srvconf.get('redis_port'), srvconf.get('redis_password'));
  } else {
    redisUrl = srvconf.get('redis_url');
    console.log(`Found redis_url ${maskRedisUrl(redisUrl)}`);
  }
}

const options = redisUrl
  ? { url: redisUrl }
  : {};

const redisClient = redis.createClient(options);

redisClient.on('error', err => {
  logger.error(err);
});

export default redisClient;
The problem is that (async () => {...})() returns a Promise and you are not awaiting it at the top level, so the script continues to run past that line, sets options = {}, and exports the redisClient.
What you need is a top-level await, which is enabled by default in Node versions >= 14.8.0 (for ES modules). However, if your project uses a version older than that, there is a workaround as shown below.
Please note that the below code is NOT tested, since I do not have the same project setup locally.
Module
import redis from "redis";
import bluebird from "bluebird";
import logger from "../logger";
import srvconf from "../srvconf";
import { getVaultSecret } from "../services/vault.service";
const vaultConfig = srvconf.get("vault");
bluebird.promisifyAll(redis);
let redisUrl = "";
let redisClient = null;
const initRedisClient = () => {
const options = redisUrl ? { url: redisUrl } : {};
redisClient = redis.createClient(options);
redisClient.on("error", (err) => {
logger.error(err);
});
};
const maskRedisUrl = (url) => url.replace(/password=.*/, "password=*****");
const setRedisUrl = (host, port, pw) => {
const pwstring = pw ? `?password=${pw}` : "";
const url = `redis://${host}:${port}${pwstring}`;
console.log(`Setting redis_url to '${maskRedisUrl(url)}'`);
return url;
};
(async () => {
if (vaultConfig.use_vault) {
try {
const secret = await getVaultSecret(
`${vaultConfig.redis.secrets_path + vaultConfig.redis.key}`
);
redisUrl = setRedisUrl(
srvconf.get("redis_host"),
srvconf.get("redis_port"),
secret.PASSWORD
);
} catch (err) {
console.log(err);
}
} else {
if (!srvconf.get("redis_url")) {
redisUrl = setRedisUrl(
srvconf.get("redis_host"),
srvconf.get("redis_port"),
srvconf.get("redis_password")
);
} else {
redisUrl = srvconf.get("redis_url");
console.log(`Found redis_url ${maskRedisUrl(redisUrl)}`);
}
}
// Initialize Redis client after vault secrets are loaded
initRedisClient();
})();
export default redisClient;
Usage
At all places where you import and use the client, you always need to check whether it was actually initialized successfully, and throw (and catch) a well-defined error if it was not.
const redisClient = require("path/to/module");
...
if (redisClient) {
  // Use it
} else {
  throw new RedisClientNotInitializedError();
}
...
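And if you can use Node >= 14.8.0 with ES modules, a top-level await removes the need for the null check entirely. A minimal sketch (same srvconf/vault helpers assumed, also untested):

// redis-client.mjs — top-level await variant (Node >= 14.8.0, ESM only)
import redis from "redis";
import srvconf from "../srvconf";
import { getVaultSecret } from "../services/vault.service";

const vaultConfig = srvconf.get("vault");
let redisUrl = "";

if (vaultConfig.use_vault) {
  // Module evaluation pauses here until the secret arrives,
  // so importers always receive a fully initialized client.
  const secret = await getVaultSecret(vaultConfig.redis.secrets_path + vaultConfig.redis.key);
  redisUrl = `redis://${srvconf.get("redis_host")}:${srvconf.get("redis_port")}?password=${secret.PASSWORD}`;
}

const redisClient = redis.createClient(redisUrl ? { url: redisUrl } : {});
export default redisClient;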

Mocking Twilio Client from twilio-node package

I currently use the Twilio Node Helper Library to do various API calls, whether to create assistants/services, list them, remove them, or various other things when it comes to uploading tasks, samples, and fields to create a chatbot on Twilio Autopilot.
An example of some of these functions:
async function createAssistant(name, client) {
  var assistantUid = ""
  await client.autopilot.assistants
    .create({
      friendlyName: name,
      uniqueName: name
    })
    .then(assistant => assistantUid = assistant.sid);
  return assistantUid
}

async function getAccount(client) {
  var valid = true
  try {
    await client.api.accounts.list()
  } catch (e) {
    valid = false
  }
  return valid
}

async function connectToTwilio(twilioAccountSid, twilioAuthToken) {
  try {
    var client = twilio(twilioAccountSid, twilioAuthToken);
  } catch (e) {
    throw new TwilioRequestError(e)
  }
  var valid = await getAccount(client)
  if (valid && client._httpClient.lastResponse.statusCode === 200) {
    return client
  } else {
    throw new Error("Invalid Twilio Credentials")
  }
}
where client is the client object returned from require("twilio")(twilioAccountSid, twilioAuthToken).
I was wondering what would be the best way of mocking this API to allow me to emulate creating assistants, returning their uniqueNames, etc.
I was wondering if I might just define some class like
class TwilioTestClient {
  constructor(sid, token) {
    this.sid = sid
    this.token = token
    this.assistants = new TwilioAssistant()
    this.services = new TwilioServices()
  }
}
Where TwilioAssistant and TwilioServices would be additional classes.
Any help would be greatly appreciated!
I struggled with mocking Twilio for a long time. In fact I previously architected my application such that I could mock a wrapper around the Twilio Node Helper just to avoid mocking the actual library. But recent changes to the architecture meant that was no longer an option. This morning I finally got a mock of the Twilio Node Helper Library working. I'm not familiar with the portions of the Twilio library you are using, but I'm hopeful the example here will help you.
We have a function to check if a phone number is mobile, call it isMobile.js.
const Twilio = require("twilio");

const isMobile = async (num) => {
  const TwilioClient = new Twilio(process.env.TWILIO_SID, process.env.TWILIO_AUTH_TOKEN);
  try {
    const twilioResponse = await TwilioClient.lookups.v1
      .phoneNumbers(num)
      .fetch({ type: "carrier", mobile_country_code: "carrier" });
    const { carrier: { type } = {} } = twilioResponse;
    return type === "mobile";
  } catch (e) {
    return false;
  }
};

module.exports = isMobile;
Then build a mock for Twilio in __mocks__/twilio.js
const mockLookupClient = {
  v1: { phoneNumbers: () => ({ fetch: jest.fn(() => {}) }) }
};

module.exports = class Twilio {
  constructor(sid, token) {
    this.lookups = mockLookupClient;
  }
};
In the test file isMobile.test.js
jest.mock("twilio");
const Twilio = require("twilio");
const isMobile = require("./isMobile");
const mockFetch = jest.fn();
const mockPhoneNumbers = jest.fn(() => ({
fetch: mockFetch
}));
describe("isMobile", () => {
const TwilioClient = new Twilio(process.env.TWILIO_SID, process.env.TWILIO_AUTH_TOKEN);
const lookupClient = TwilioClient.lookups.v1;
lookupClient.phoneNumbers = mockPhoneNumbers;
beforeEach(() => {
mockFetch.mockReset();
});
test("is a function", () => {
expect(typeof isMobile).toBe("function");
});
test("returns true for valid mobile number", async () => {
const validMobile = "+14037007492";
mockFetch.mockReturnValueOnce({
carrier: { type: "mobile", mobile_country_code: 302 }, // eslint-disable-line camelcase
phoneNumber: validMobile
});
expect(await isMobile(validMobile)).toBe(true);
});
test("returns false for non-mobile number", async () => {
const invalidMobile = "+14035470770";
mockFetch.mockReturnValueOnce({
carrier: { type: "not-mobile", mobile_country_code: null }, // eslint-disable-line camelcase
phoneNumber: invalidMobile
});
expect(await isMobile(invalidMobile)).toBe(false);
});
});
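I'm not using Autopilot myself, but the same pattern should extend to your client.autopilot.assistants calls. A rough, untested sketch of the mock class (the sid value is made up):

const mockAssistantCreate = jest.fn(async ({ friendlyName, uniqueName }) => ({
  sid: "UAxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", // fake assistant sid
  friendlyName,
  uniqueName
}));

module.exports = class Twilio {
  constructor(sid, token) {
    this.autopilot = { assistants: { create: mockAssistantCreate } };
  }
};

Your createAssistant function would then resolve with the fake sid, which you can assert against in the test.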

Requests to Express app come with empty body though data is being sent, but only in one route

It's nice to join the group of people brave enough to ask questions on Stack, so that everyone can take advantage :)
My problem is pretty strange. I'm writing an app in Express; I have two routes so far, and everything has been going pretty smoothly, yet I've encountered one problem which I cannot seem to solve. In one route, with the PATCH method, the incoming requests have an empty body. The rest of the app is running smoothly and everything is working fine, but this one route seems to be broken, and I cannot figure out why. Strangely enough, I found out that the requests DO have a body in one case: when I'm sending requests from my tests (supertest) using the .send({ ... }) method. When I'm sending requests with .attach or .field, they come in empty. Same with requests sent from Postman (empty). What is causing such strange behavior?
Here are my tests:
const request = require('supertest');
const Image = require('../models/image');
const app = require('../app');
const crypto = require('crypto');
const fs = require('fs')
const { setupImages } = require('./fixtures/db')

beforeEach(setupImages);

describe('[IMAGE] - ', () => {
  test('Should get images', async () => {
    const main_img = await Image.findOne({ main: true });
    const image = await request(app)
      .get(`/image/${main_img._id}`)
      .expect(200);
    expect(image.header['content-type']).toBe('image/png');
  });
  test('Should delete images', async () => {
    const image = await Image.findOne({ description: 'Lorem ipsum' });
    await request(app)
      .delete(`/image/${image._id}`);
    const imageFound = await Image.findById(image._id);
    expect(imageFound).toBeNull();
  });
  //TEST THAT FAILS
  test('Should edit images', async () => {
    const image = await Image.findOne({ main: false });
    await request(app)
      .patch(`/image/${image._id}`)
      .field('description', 'new desc')
      .attach('image', './src/tests/fixtures/imgtest.png')
      .expect(200);
    const returnChecksum = file => {
      return crypto
        .createHash('md5')
        .update(file, 'utf8')
        .digest('hex')
    }
    const imageEdited = await Image.findById(image._id);
    const newImageChecksum = returnChecksum(fs.readFileSync(__dirname + '/fixtures/imgtest.png'));
    expect(returnChecksum(imageEdited.image)).toBe(newImageChecksum);
    expect(imageEdited.description).toBe('new desc');
  });
})
Here are the image routes:
const express = require('express');
const router = new express.Router();
const Image = require('../models/image');
const chalk = require('chalk');

router.get('/image/:id', async (req, res) => {
  const { id } = req.params;
  try {
    const image = await Image.findById(id);
    if (!image) {
      return res.status(404).send()
    }
    res.set('Content-Type', 'image/png');
    res.send(image.image);
  } catch (e) {
    console.log(chalk.red('Error serving image: ') + e);
    res.status(500).send();
  }
});

//THE ROUTE THAT FAILS
router.patch('/image/:id', async (req, res) => {
  const { id } = req.params;
  const updateFields = Object.entries(req.body);
  console.log('image patch req body', req.body)
  try {
    const imageEdited = await Image.findById(id, function (err, doc) {
      if (err) { return err; }
      updateFields.forEach(field => doc[field[0]] = field[1])
      doc.save(res.status(200).send(doc));
    });
    if (!imageEdited) {
      res.status(400).send();
    }
  } catch (e) {
    res.status(500).send();
    console.log(chalk.red('Error editing image: ') + e);
  }
});

router.delete('/image/:id', async (req, res) => {
  const { id } = req.params;
  try {
    await Image.findByIdAndDelete(id);
    res.status(200).send();
  } catch (e) {
    res.status(500).send();
    console.log(chalk.red('Error deleting image: ') + e);
  }
});

module.exports = router;
And my app.js file:
const express = require('express');
require('./db/mongoose');
const productRouter = require('./routers/product');
const imageRouter = require('./routers/image');
const app = express();
app.use(express.json());
app.use(productRouter);
app.use(imageRouter);
module.exports = app;
The result of console.log in image route:
console.log src/routers/image.js:30
image patch req body {}
And this is the behavior of the app with the sending method changed in the test:
test('Should edit images', async () => {
  const image = await Image.findOne({ main: false });
  await request(app)
    .patch(`/image/${image._id}`)
    // .field('description', 'new desc')
    // .attach('image', './src/tests/fixtures/imgtest.png')
    .send({ description: 'new desc' })
    .expect(200);
  const returnChecksum = file => {
    return crypto
      .createHash('md5')
      .update(file, 'utf8')
      .digest('hex')
  }
  const imageEdited = await Image.findById(image._id);
  const newImageChecksum = returnChecksum(fs.readFileSync(__dirname + '/fixtures/imgtest.png'));
  expect(returnChecksum(imageEdited.image)).toBe(newImageChecksum);
  expect(imageEdited.description).toBe('new desc');
});
console.log src/routers/image.js:30
image patch req body { description: 'new desc' }
Thanks in advance!
I've managed to fix it myself: the problem lay in the fact that I'd forgotten about multer. Requests sent with .field/.attach (or as form-data from Postman) are multipart/form-data, which express.json() does not parse, so without multer the body arrives empty, even if you're not sending any files.
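For reference, a minimal sketch of the fix (field names match the failing test above; multer's default in-memory storage is assumed):

const multer = require('multer');
const upload = multer(); // no options: uploaded files are kept in memory on req.file

// The PATCH route, now able to parse multipart/form-data bodies
router.patch('/image/:id', upload.single('image'), async (req, res) => {
  console.log('image patch req body', req.body); // { description: 'new desc' }
  // req.file.buffer holds the uploaded image when one is attached
  // ...existing update logic...
});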
