I need to pull in configs from secrets manager before wiring up my proxies
const express = require('express');
const { createProxyMiddleware } = require('http-proxy-middleware');
const { SecretsManagerClient, GetSecretValueCommand } = require('#aws-sdk/client-secrets-manager');
const pjson = require('../app/config');
const app = express();
// Fetches runtime secrets (apiDomain, apiKey) from AWS Secrets Manager.
// The real body is redacted by the author; `secrets` is resolved inside the
// actual implementation — TODO confirm it returns an object with those keys.
async function getSecretsManager(){
return secrets; // (this functions correctly, redacting for security)
}
// Build one proxy config per entry under `nonprod-proxies` in ../app/config,
// resolving secret values (API domain/key) first.
// Returns: Promise<Array<{path, options}>> consumable by http-proxy-middleware.
// The `app` parameter is unused but kept for backward compatibility.
const buildProxies = async (app) => {
  const proxies = pjson['nonprod-proxies'];
  const secrets = await getSecretsManager();
  // map() instead of forEach+push; per-branch const options instead of a
  // shared `let target, headers, options` (headers was dead in the else arm).
  return proxies.map(proxy => {
    console.log(`[PROXY SERVER] Creating proxy for: ${proxy['proxy-path']}`);
    const rewrite = `^${proxy['proxy-path']}`;
    let options;
    if (proxy['internal']) {
      // Internal routes target the secret API domain and carry the API key.
      options = {
        target: `https://${secrets['apiDomain']}`,
        changeOrigin: true,
        logLevel: 'debug',
        headers: { 'x-api-key': secrets['apiKey'] },
      };
    } else {
      // External routes proxy to their own domain and strip the local prefix.
      options = {
        target: `https://${proxy['proxy-domain']}`,
        changeOrigin: true,
        logLevel: 'debug',
        pathRewrite: { [rewrite]: '' },
      };
    }
    return { 'path': proxy['proxy-path'], 'options': options };
  });
};
module.exports = function(app){
buildProxies().then(proxies => {
proxies.forEach(proxyVal => {
console.log(`Proxy: ${proxyVal['path']} with options ${proxyVal['options']}`);
app.use(proxyVal['path'], createProxyMiddleware(proxyVal['options']))
});
});
console.log('=== SUCCESSFULY CREATED PROXY SERVER ===');
};
This results in localhost:3000 returning the boilerplate HTML CORS response; however, when substituting the module.exports code with:
app.use('/internal/*', createProxyMiddleware({
target: '<my aws secret url>',
changeOrigin: true,
logLevel: 'debug',
headers: { 'x-api-key': '<my aws secret api key>' }
}));
The proxy works. Is there a way to do an asynchronous load of configs within setupProxy.js?
Related
Good Afternoon,
I have a react app hosted on Netlify and have converted my Expressjs API backend to be serverless for use with Netlify functions. Right now I am running into the below error:
502: ReferenceError: regeneratorRuntime is not defined", " at /var/task/server.js"
Server.js is the main API file that imports my middleware.
I have installed the Babel regenerator plugin and added it to my Babel config file, as well as importing it at the top of my Server.js file. I am not sure what else to do, as I am new to the Netlify platform.
Server.js
import "core-js/stable";
import "regenerator-runtime/runtime";
const express = require('express');
var expressSession = require('express-session');
const cors = require('cors');
const path = require('path');
const app = express();
const axios = require('axios');
const mongoose = require('mongoose');
const rateLimit = require("express-rate-limit");
const serverless = require('serverless-http');
require('dotenv').config()
// Middleware
const saveLoggedInUserMiddleware = require('../Middleware/SaveLoggedInUserMiddleware');
const hostValidationMiddleware = require('../Middleware/HostValidationMiddleware');
const sessionValidationMiddleware = require('../Middleware/SessionValidationMiddleware');
const updateUserDataMiddleware = require('../Middleware/UpdateUserDataMiddleware');
//Routers
const StoreInvoiceRouter = require('../Routes/Store-Update-Invoices');
const UserInvoicesRouter = require('../Routes/GetUserInvoices');
const UpdateUserProfileRouter = require('../Routes/UpdateUserProfile');
// Connect to MongoDB. Fix: the original mixed `await` with a callback —
// when a callback is passed, mongoose does not return a usable promise, so
// the `await` awaited nothing. Await the promise and log afterwards.
async function connectToDB() {
  // Database
  await mongoose.connect(process.env.mongo_url, { useNewUrlParser: true, useUnifiedTopology: true });
  console.log('[connectToDB]: Connected to DB');
}
// Fix: surface connection failures instead of a floating rejected promise.
connectToDB().catch(err => console.error('[connectToDB]: connection failed', err));
// Implemented a rate limiter which is a TEMPORARY FIX for the infinite loop by the useEffect
// Implemented a rate limiter which is a TEMPORARY FIX for the infinite loop
// caused by the useEffect on the frontend.
// NOTE(review): .1 * 60 * 1000 is 6000 ms (6 seconds), not the 10 seconds the
// original comment claimed — confirm which window was intended.
const limiter = rateLimit({
windowMs: .1 * 60 * 1000, // 6-second window, max 4 requests
max: 4
});
// Creating the session in order to save user data to req.session
app.use(
  expressSession({
    secret: process.env.SESSION_SECRET,
    resave: false,
    saveUninitialized: false,
  })
);
app.use('/fetchUserInvoices', limiter);
app.use(express.json());
//app.use(cors());
app.use(express.static('./build'));
// Validation middleware runs before each router; order matters here.
app.use('/.netlify/functions/', hostValidationMiddleware, sessionValidationMiddleware, StoreInvoiceRouter);
app.use('/.netlify/functions/', saveLoggedInUserMiddleware, UserInvoicesRouter);
app.use('/.netlify/functions/', updateUserDataMiddleware, UpdateUserProfileRouter);
// SPA fallback. Fix: resolve the build output relative to the process instead
// of a hard-coded developer-machine absolute path, which breaks on deploy.
app.get('*', async (req, res) => {
  res.sendFile(path.resolve('build', 'index.html'));
});
// Function makes a call to the endpoint that runs deletion logic; this allows
// the client to only have to be refreshed once to render changes on the
// frontend.
async function updateDBCall() {
  const url = `${process.env.REACT_APP_STORE_INVOICES}`;
  const axiosConfig = {
    method: 'get',
    url
  };
  try {
    await axios(axiosConfig);
    // Fix: only report success when the request actually succeeded — the
    // original logged "Executed call" even after a caught error.
    console.log('[updateDBCall]: Executed call to STORE_INVOICES');
  } catch (e) {
    console.log(e);
  }
};
// Supporting functions
updateDBCall();
// NOTE(review): under serverless-http/Netlify Functions the platform invokes
// the exported handler directly; app.listen() starts a second HTTP server in
// the same process — confirm it is only wanted for local development.
app.listen(process.env.PORT || 8080, () => {
// NOTE(review): message hard-codes 8080 even when process.env.PORT is set.
console.log(`Server listening on 8080`);
});
// Lambda-compatible entry point used by Netlify.
module.exports.handler = serverless(app);
babel.config.js
module.exports = api => {
api.cache.forever();
return {
presets: [
"#babel/preset-env",
"#babel/preset-react"
],
plugins: [
"add-react-displayname",
"#babel/plugin-proposal-class-properties",
"#babel/plugin-proposal-object-rest-spread",
"#babel/transform-runtime"
]
};
}
Changed my babel to the below as well as installed and implemented netlify-lambda
module.exports = api => {
api.cache.forever();
return {
"presets": [
[ "#babel/preset-env", {
"targets": {
"node": "8.10"
}
}]
],
plugins: [
"add-react-displayname",
"#babel/plugin-proposal-class-properties",
"#babel/plugin-proposal-object-rest-spread"
]
};
}
Whenever I try to run the function refreshStock() in an endpoint in one of the API endpoints /api/seller/deactivate it gives me this error:
Error: listen EADDRINUSE: address already in use :::3000
at Server.setupListenHandle [as _listen2] (net.js:1318:16)
at listenInCluster (net.js:1366:12)
at Server.listen (net.js:1452:7)
at C:\Users\***\Documents\GitHub\***\***\.next\server\pages\api\seller\deactivate.js:191:10
error Command failed with exit code 1.
info Visit https://yarnpkg.com/en/docs/cli/run for documentation about this command
It looks like it's trying to restart the server, but it happens after it compiles, is there something I'm doing wrong, I've followed a couple of tutorials on medium, and they give this same type of code, just not ES Modules. I want to use ES Modules because it is what my database functions are written in.
Server.js:
import express from 'express';
import { createServer } from 'http';
import next from 'next';
import models from './server/models';
import { genStock } from './server/lib/functions';
import { Server } from 'socket.io';
// Port/mode come from the environment; default to 3000 in development.
const port = parseInt(process.env.PORT || '3000', 10);
const dev = process.env.NODE_ENV !== 'production';
// Next.js app + its request handler, mounted on a shared HTTP server so
// socket.io can attach to the same listener.
const nextApp = next({ dev });
const nextHandler = nextApp.getRequestHandler();
const app = express();
const server = createServer(app);
const io = new Server(server);
const Users = models.users;
// Socket.io auth middleware: resolve the user from the handshake token and
// record this socket's id on the user row. Rejects unknown/missing tokens.
io.use(async (socket, next) => {
  const err = new Error('Unauthorized');
  err.data = { message: 'Unauthorized, please try again later.' };
  try {
    if (!socket.handshake.auth.token) return next(err);
    let user = await Users.findOne({
      where: {
        socket_token: socket.handshake.auth.token,
      },
    });
    if (!user) {
      console.log('unauthenticated socket');
      socket.disconnect();
      // Fix: return here — the original fell through after next(err), ran the
      // Users.update below for a non-existent user, and called next() twice.
      return next(err);
    }
    await Users.update(
      { socket_id: socket.id },
      {
        where: {
          socket_token: socket.handshake.auth.token,
        },
      },
    );
    next();
  } catch (e) {
    console.log(e);
    next(e);
  }
});
// On connect, push the current stock snapshot to the new client.
io.on('connection', async (clientSocket) => {
  const currentStock = await genStock();
  clientSocket.emit('updateStock', currentStock);
});
// Fails with address already in use :::3000
// Broadcast a freshly generated stock snapshot to every connected client.
export async function refreshStock() {
  const latest = await genStock();
  io.emit('updateStock', latest);
}
// Boot Next.js, then route everything through its handler and start listening.
nextApp.prepare().then(async () => {
  app.all('*', (req, res) => nextHandler(req, res));
  server.listen(port, () => {
    console.log(`> Ready on http://localhost:${port}`);
  });
}).catch((err) => {
  // Fix: the original left prepare() failures as unhandled rejections.
  console.error('Failed to start Next.js server', err);
  process.exit(1);
});
This is meant to refresh the stock after a seller deactivates their account and sends all users the new stock.
/api/seller/deactivate
....
await refreshStock();
....
I figured it out — I just split up the WebSocket server and the Next.js one. I have whitelisted local IPs so that it appears to only allow server-to-server communication. Although I don't think this is foolproof — there is most likely a better way to set up this type of communication — for now it works.
/**
* This server cannot be imported in /api folders, it won't work.
* Although it can import other functions
* */
import express from 'express';
import { createServer } from 'http';
import session from 'express-session';
import { Server } from 'socket.io';
import { genStock } from './server/lib/stockFunctions';
import { sessionStore } from './server/lib/session';
import passport from './server/lib/passport';
import models from './server/models';
// Loopback addresses allowed to hit the internal /refresh-stock endpoint
// (server-to-server calls only).
const authorizedIPs = ['::1', '127.0.0.1', '::ffff:127.0.0.1'];
const Users = models.users;
const app = express();
const httpServer = createServer(app);
// Standalone websocket server; CORS restricted to the Next.js dev origin.
const io = new Server(httpServer, {
cors: {
origin: `http://localhost:3000`,
methods: ['GET', 'POST'],
credentials: true,
},
});
const wrap = (middleware) => (socket, next) => middleware(socket.request, {}, next);
// Run the shared express-session middleware on each socket handshake so
// socket.request.session is populated from the same store as HTTP requests.
io.use(
wrap(
session({
secret: "---",
resave: false,
saveUninitialized: true,
cookie: {
httpOnly: true,
// Secure cookies only over HTTPS in production.
secure: process.env.NODE_ENV === 'production',
path: '/',
sameSite: 'lax',
},
store: sessionStore,
}),
),
);
// Passport must run after the session middleware (it reads req.session).
io.use(wrap(passport.initialize()));
io.use(wrap(passport.session()));
// Reject sockets without an authenticated passport user; otherwise record
// which socket id belongs to that user.
io.use(async (socket, next) => {
  const unauthorized = new Error('Unauthorized');
  unauthorized.data = { message: 'Unauthorized, please try again later.' };
  try {
    const { user } = socket.request;
    if (!user) return next(unauthorized);
    await Users.update(
      { socket_id: socket.id },
      { where: { id: user.id } },
    );
    return next();
  } catch (e) {
    console.log(e);
    return next(e);
  }
});
// Send the current stock to each newly connected client.
io.on('connection', async (socket) => {
  socket.emit('updateStock', await genStock());
});
// Internal endpoint: regenerate the stock and broadcast it to all clients.
// Only loopback IPs may call it (server-to-server from the Next.js process).
app.post('/refresh-stock', async function (req, res) {
  const ip = req.ip;
  if (!authorizedIPs.includes(ip)) {
    console.log(ip);
    return res.status(401).json({ success: false });
  }
  try {
    const newStock = await genStock();
    io.emit('updateStock', newStock);
    return res.status(200).json({ success: true });
  } catch (err) {
    // Fix: a genStock() rejection previously became an unhandled rejection
    // and the HTTP request hung with no response.
    console.error(err);
    return res.status(500).json({ success: false });
  }
});
httpServer.listen(3001);
console.log(`> Websockets ready on http://localhost:3001`);
My chrome extension was working perfectly until recently.
I originally received a error message of
required same site none and secure in the header
I then added to my express.session config,
samesite:none, secure:true
Now instead of that error, I am unable to gain access to my website by login in with my chrome extension, which I believe is due to socket.io not maintaining the authentication cookie.
My express server is as below,
const config = require('../../config');
const express = require('express');
const app = express();
const server = require('http').createServer(app);
const io = require('socket.io')(server, { wsEngine: 'ws' });
const mysql = require('mysql');
const expressSession = require('express-session');
const ExpressMysqlSessionStore = require('express-mysql-session')(expressSession);
const sharedsession = require('express-socket.io-session');
const path = require('path');
const utils = require('./utils');
// remove from header "X-Powered-By: Express"
app.disable('x-powered-by');
// NOTE(review): the server starts listening before the routes/session
// middleware below are registered; Node's single-threaded startup makes this
// work in practice, but registering routes first would be clearer.
server.listen(config.serverParams.port, config.serverParams.address, () => {
console.log(`Server running at http://${server.address().address}:${server.address().port}`);
});
/* DATABASE */
// NOTE(review): the connection is stored on `global` so other modules can use
// `db` without importing it; db.connect() has no error callback, so failures
// only surface on the first query — consider handling the connect error.
global.db = mysql.createConnection(config.db);
db.connect();
/* DATABASE */
/* SESSION */
// Session state persisted in MySQL and shared with socket.io (see the
// sharedsession io.use below).
const sessionStore = new ExpressMysqlSessionStore(config.sessionStore, db);
const session = expressSession({
...config.session,
store: sessionStore,
});
app.use(session);
/* SESSION */
// Static frontend assets plus explicit .html routes for the known pages
// (req.path is limited to the whitelisted paths in the array).
app.use(express.static(config.frontendDir));
app.get([
'/signup',
'/stats',
'/pay',
], (req, res) => res.sendFile(path.join(`${config.frontendDir}${req.path}.html`)));
// Share the express session with socket.io handshakes; autoSave persists
// session mutations made from socket handlers.
io.use(sharedsession(session, {
autoSave: true
}));
io.on('connection', socket => {
// Per-packet guard: every event except login/signup/checkAuth requires a
// logged-in session user on the shared handshake session.
socket.use((packet, next) => {
if (packet[0]) {
console.log('METHOD:', packet[0]);
const sessionData = socket.handshake.session.user;
const noSessionNeed = [ 'login', 'signup', 'checkAuth' ].includes(packet[0]);
let error;
if ( ! sessionData && ! noSessionNeed) error = { code: -1, message: 'You need to login in extension!' };
// Errors are serialized to JSON so the client can parse code/message.
if (error) return next(new Error(JSON.stringify(error)));
else next();
}
// NOTE(review): when packet[0] is falsy, next() is never called, so that
// packet is silently dropped — confirm this is intended.
});
// Wire up feature modules; each factory closes over this socket.
const auth = require('./auth')(socket);
socket.on('checkAuth', auth.checkAuth);
socket.on('login', auth.login);
socket.on('signup', auth.signup);
socket.on('logout', auth.logout);
const users = require('./users')(socket);
socket.on('users.get', users.get);
const sentiment = require('./sentiment')(socket);
socket.on('sentiment.get', sentiment.get);
socket.on('sentiment.set', sentiment.set);
socket.on('disconnect', () => {
});
});
And the config file is somewhat like this,
config.session = {
// globals.config.express.sessionSecret
resave: true,
saveUninitialized: true,
cookie: {
maxAge: 86400000,
/* FOR WORK ON LOCALHOST
secure: true,
sameSite: 'lax', */
sameSite:"None",
secure:true,
domain: '.xx.xx',
},
Here is how the authentication is done with the socket.io
const passport = require('passport');
/* PASSPORT */
require('./passport')(passport); // pass passport for configuration
/* app.use(passport.initialize());
app.use(passport.session()); */
/* PASSPORT */
const utils = require('./utils');
const bcrypt = require('bcrypt');
const saltRounds = 10;
module.exports = socket => {
this.checkAuth = fn => {
if (fn) fn();
};
this.login = (params, fn) => {
passport.authenticate('local-login', (err, user) => {
const response = {};
if (user) {
socket.handshake.session.user = user;
socket.handshake.session.save();
response.message = 'Your successful login!';
response.data = {
id: user.id,
username: user.username,
};
}
else if (err) {
response.error = {
code: err,
message: ''
};
if (err == -1) response.error.message = 'Incorrect username or password!';
}
if (fn) fn(response);
})({ body: params });
},
// socket.on('signup', (params, fn) => {
this.signup = (params, fn) => {
passport.authenticate('local-signup', (err, user) => {
const response = {};
if (user) {
console.log('signup', user);
response.message = 'Your successful signup!';
}
else if (err) {
response.error = {
code: err,
message: ''
};
if (err == -1) response.error.message = 'User alreay exist!';
}
if (fn) fn(response);
})({ body: params });
};
// socket.on('logout', fn => {
this.logout = fn => {
delete socket.handshake.session.user;
};
return this;
};
utils
module.exports = socket => {
// socket.on('users.get', fn => {
this.get = fn => {
if (fn) {
const response = {};
response.data = {
id: socket.handshake.session.user.id,
username: socket.handshake.session.user.username,
};
fn(response);
}
};
return this;
};
Would love to be able to solve this issue :P
Thanks!
This was solved by using a separate JWT token to resolve the issue with the socket.io sessions.
Maybe it has to do with the signup function? I think there's no sign up option for first-comers. Try making if (user) -> if (!user)
I have an array of drag'n'dropped files inside angular component. I would like to make a POST request to http://some.url for each of them. I'm trying to do the following:
drop.component.ts
public drop(event) {
  // somehow set droppedFiles
  // Fix: subscribe to the joined observables — forkJoin is lazy, and the
  // original discarded its return value, so completion was never observed.
  const uploads = this.droppedFiles.map(file => this.uploadFile(file));
  forkJoin(uploads).subscribe(
    (results: ISaveImageResponse[]) => console.log(results),
    error => console.error(error)
  );
}

public uploadFile(image) {
  // Fix: return the Observable instead of calling subscribe() here —
  // subscribe() returns a Subscription, which forkJoin cannot consume, so
  // multi-file drops left every request pending.
  return this.imagesService.saveImage(image, this.tigerId).pipe(first());
}
images.service.ts
public saveImage(image: File): Observable<ISaveImageResponse> {
let imageInfo = {
name: null, type: null, image: null
};
imageInfo.name = [image.name, Validators.required];
imageInfo.type = [image.type, Validators.required];
imageInfo.image = null;
let form = this.formBuilder.group(imageInfo);
form.get('image').setValue(image);
const formModel = this.prepareFormData(form);
return this.http.post<any>(
'http://some.url',
formModel
).pipe(
map((imageInfo: any) => {
return imageInfo
}),
catchError((error, caught) => {
return EMPTY;
})
);
}
If I drop single file, this works fine. But if there are multiple files, requests become pending but I can't see them logged to server (which is express.js server).
What is the problem?
UPDATE
I've updated code to be actual: now uploadImage() returns Observable and requests are called from forkJoin()
UPDATE 2
After some time requests being pending I get the following error in server console:
(node:1291) MaxListenersExceededWarning: Possible EventEmitter memory leak detected.
11 field listeners added. Use emitter.setMaxListeners() to increase limit
But no info about request happening at all (for any request I do, for example console.log('POST /images');)
UPDATE 3
server-side code for handling POST requests:
server.js
const server = express();
const fs = require("fs");
const path = require('path');
const passport = require('passport');
const session = require('express-session');
const RedisStore = require('connect-redis')(session);
// NOTE(review): `config`, `cors` and `bp` (body-parser) are used below but
// not required in this snippet — confirm they are defined elsewhere.
// Sessions are stored in Redis so they survive restarts and scale out.
server.use(
session({
store: new RedisStore({
url: config.redisStore.url
}),
secret: config.redisStore.secret,
resave: false,
saveUninitialized: false
})
);
server.use( passport.initialize() );
server.use( passport.session() );
// CORS wide open; JSON bodies parsed; uploaded files served statically.
server.use( cors({ origin: '*' }) );
server.use( bp.json() );
server.use( express.static('uploads') );
server.use( require('./image.routes') );
const port = 9901;
// Fix: ensure the uploads directory exists BEFORE accepting connections; the
// original created it inside the listen callback, so an early upload could
// race the mkdir.
const dir = __dirname + '/uploads';
if (!fs.existsSync(dir)) {
  fs.mkdirSync(dir);
}
server.listen(port, () => {
  console.log('We are live on ' + port);
});
image.routes.js
const fs = require('fs');
// Fix: `express` is used below (express.Router) but was never required.
const express = require('express');
const formidable = require('express-formidable');
const path = require('path');

let router = express.Router();

// POST /images — multipart upload parsed by formidable; image metadata is
// persisted via createImage() (defined elsewhere).
router.post('/images',
  formidable({
    encoding: 'utf-8',
    uploadDir: path.resolve(__dirname, 'uploads'),
    multiples: true,
    keepExtensions: true
  }),
  (req, res, next) => {
    console.log('\nPOST /images');
    const image = req.fields;
    const data = req.files;
    image.path = data.image.path;
    // Fix: removed `fs.createReadStream(image.path)` — the stream was never
    // consumed or closed, leaking a file descriptor per upload.
    createImage(image).then( // createImage() saves image info to db
      result => {
        if (result) {
          res.status(200).send(result);
        } else {
          console.error("Cannot save image");
          res.status(400).send("Cannot save image");
        }
      }).catch(e => {
        // Fix: answer the request on failure instead of leaving it hanging.
        console.error(e.stack);
        res.status(500).send("Cannot save image");
      });
  });

module.exports = router;
You cant use Promise.all to handle Rxjs requests.
You can use forkJoin to make multiple Observale request at once,
public drop(event) {
* somehow set droppedFiles *
// Collect one Observable per dropped file, then join them so all uploads run
// together.
let observables = []
this.droppedFiles.forEach(file => observables.push(this.uploadFile(file)));
// NOTE(review): forkJoin is lazy — without a .subscribe() nothing executes.
Rx.Observable.forkJoin(observables)
}
Also your uploadFile function is not returning an observable
// Return the Observable (first emission only) so callers like forkJoin can
// subscribe to it themselves.
public uploadFile(image) {
return this.imagesService.saveImage(image, this.tigerId).pipe(first())
}
check out example number 5 here
Try using 'map' or 'for' instead forEach.
public drop(event) {
* somehow set droppedFiles *
// NOTE(review): Promise.all only works if uploadFile returns a Promise (or
// thenable) — with RxJS Observables, use forkJoin instead.
Promise.all(this.droppedFiles.map(file => this.uploadFile(file)));
}
I am trying to make a simple GET request using the Fetch API in React.
I have the following code (shortened for brevity)
class App extends Component {
constructor(props) {
super(props);
this.state = {
username: "",
password: "",
stories: [],
isLoggedIn: false
};
}
getAllStories = async () => {
let res = await fetch(storiesAPI, {header ('Access-Control-Allow-Origin: *')})
let json = await res.json()
await this.setState({stories: json})
console.log(this.state.stories)
}
componentDidMount (){
if (localStorage.token) {
this.setState({
isLoggedIn: true
});
} else {
this.setState({
isLoggedIn: false
});
}
this.getAllStories();
}
render() {
return (
<div>
<h1>Hi</h1>
</div>
)
}
export default App;
As soon as the component mounts I get a failed to fetch error, which I initially thought was because of a CORS error,
because currently my server.js file looks like so:
const express = require('express')
const parser = require('body-parser')
const cors = require('cors')
const passport = require('./config/passport')()
const app = express()

// Fix: the trailing comment was split across two lines, leaving the second
// line as bare (invalid) code and breaking the file.
var corsOptions = {
  origin: "*",
  // some legacy browsers (IE11, various SmartTVs) choke on 204
  optionsSuccessStatus: 200
};

app.set('port', process.env.PORT || 4000)
app.use(cors(corsOptions));
app.use(parser.json())
app.use(passport.initialize())
app.use(require('./routes/index'))

app.listen(app.get('port'), () => {
  console.log('Server listening on port ' +
    app.get('port'))
})
I thought that I was doing everything right, but the following is what I've tried:
1.fetch headers from let res = await fetch(storiesAPI, {header ('Access-Control-Allow-Origin: *')}) to let res = await fetch(storiesAPI, { credentials: "same-origin" })
2. Removing the CORS option
3.Adding a proxyurl in reference to a stack overflow post