I'm creating a website using Next.js and Docker so that I can easily deploy it. I used npx create-next-app to initialize it and used this Dockerfile (slightly modified) to containerize it. Since I wanted to use SSL with my server without going through the hassle of setting up a proxy, I followed this article and set up the custom server.
This worked fine when I ran it outside of a Docker container, and it performed as expected, serving over HTTPS. However, when I containerized it and tried to open the webpage over HTTPS, I got SSL_ERROR_RX_RECORD_TOO_LONG, but I could open the page over plain HTTP (which I could not do when running outside of a container). Some googling led me to this question, from which I concluded that when running outside of a Docker container the custom server serves HTTPS as expected, but when I containerize it, it starts serving HTTP, even though no code has changed.
I'd expect the behavior to be the same when running locally or containerized.
At first I assumed this was due to invalid key and cert values in httpsOptions, but I wasn't able to find anything that would make them invalid, and I don't see how that would cause this strange behavior. I tried changing the Docker base image from node:16-alpine to node:latest to see if it had something to do with the parent image, but that was fruitless.
One other minor issue is that console.log does not seem to write to the container's log for some reason; I tried googling this but didn't find much of anything pertaining to it. This has made debugging much harder, as I can't really output any debug data. The only log I get when running inside a container is Listening on port 3000 url: http://localhost:3000, which I assume is output by some library/package, as it isn't anywhere in my code.
Here is my custom server code in case it would be helpful:
const https = require('https');
const fs = require('fs');
const { parse } = require('url');
const next = require('next');

const dev = process.env.NODE_ENV !== 'production';
const hostname = "127.0.0.1";
const port = process.env.PORT || 3000;
const app = next({ dev, hostname, port });
const handle = app.getRequestHandler();

const httpsOptions = {
  key: fs.readFileSync('./cert/privkey.pem'),
  cert: fs.readFileSync('./cert/fullchain.pem')
};

app.prepare().then(() => {
  https.createServer(httpsOptions, async (req, res) => { // When running on docker this creates an HTTP server instead of HTTPS
    const parsedUrl = parse(req.url, true);
    const { pathname, query } = parsedUrl;
    await handle(req, res, parsedUrl);
  }).listen(port, (err) => {
    if (err) throw err;
    console.log(`Ready on https://localhost:${port}`);
  });
});
Link to a reproducible example here.
The thing is that, based on your sample repo, the server.js in the root of your repository gets overwritten in the image because of this line in the Dockerfile:
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
So the actual server.js running in the container is the one created by the yarn build command, and it looks like this (you can exec into the container and see it for yourself):
const NextServer = require('next/dist/server/next-server').default
const http = require('http')
const path = require('path')
process.env.NODE_ENV = 'production'
process.chdir(__dirname)
// Make sure commands gracefully respect termination signals (e.g. from Docker)
// Allow the graceful termination to be manually configurable
if (!process.env.NEXT_MANUAL_SIG_HANDLE) {
  process.on('SIGTERM', () => process.exit(0))
  process.on('SIGINT', () => process.exit(0))
}
let handler
const server = http.createServer(async (req, res) => {
  try {
    await handler(req, res)
  } catch (err) {
    console.error(err);
    res.statusCode = 500
    res.end('internal server error')
  }
})
const currentPort = parseInt(process.env.PORT, 10) || 3000
server.listen(currentPort, (err) => {
  if (err) {
    console.error("Failed to start server", err)
    process.exit(1)
  }
  const nextServer = new NextServer({
    hostname: 'localhost',
    port: currentPort,
    dir: path.join(__dirname),
    dev: false,
    customServer: false,
conf: {"env":{},"webpack":null,"webpackDevMiddleware":null,"eslint":{"ignoreDuringBuilds":false},"typescript":{"ignoreBuildErrors":false,"tsconfigPath":"tsconfig.json"},"distDir":"./.next","cleanDistDir":true,"assetPrefix":"","configOrigin":"next.config.js","useFileSystemPublicRoutes":true,"generateEtags":true,"pageExtensions":["tsx","ts","jsx","js"],"target":"server","poweredByHeader":true,"compress":true,"analyticsId":"","images":{"deviceSizes":[640,750,828,1080,1200,1920,2048,3840],"imageSizes":[16,32,48,64,96,128,256,384],"path":"/_next/image","loader":"default","loaderFile":"","domains":[],"disableStaticImages":false,"minimumCacheTTL":60,"formats":["image/webp"],"dangerouslyAllowSVG":false,"contentSecurityPolicy":"script-src 'none'; frame-src 'none'; sandbox;","remotePatterns":[],"unoptimized":false},"devIndicators":{"buildActivity":true,"buildActivityPosition":"bottom-right"},"onDemandEntries":{"maxInactiveAge":15000,"pagesBufferLength":2},"amp":{"canonicalBase":""},"basePath":"","sassOptions":{},"trailingSlash":false,"i18n":{"locales":["en"],"defaultLocale":"en"},"productionBrowserSourceMaps":false,"optimizeFonts":true,"excludeDefaultMomentLocales":true,"serverRuntimeConfig":{},"publicRuntimeConfig":{},"reactStrictMode":true,"httpAgentOptions":{"keepAlive":true},"outputFileTracing":true,"staticPageGenerationTimeout":60,"swcMinify":true,"output":"standalone","experimental":{"middlewarePrefetch":"flexible","optimisticClientCache":true,"manualClientBasePath":false,"legacyBrowsers":false,"newNextLinkBehavior":true,"cpus":7,"sharedPool":true,"profiling":false,"isrFlushToDisk":true,"workerThreads":false,"pageEnv":false,"optimizeCss":false,"nextScriptWorkers":false,"scrollRestoration":false,"externalDir":false,"disableOptimizedLoading":false,"gzipSize":true,"swcFileReading":true,"craCompat":false,"esmExternals":true,"appDir":false,"isrMemoryCacheSize":52428800,"fullySpecified":false,"outputFileTracingRoot":"","swcTraceProfiling":false,"forceSwcTransforms":false,"largePageDataBytes":128000,"enableUndici":false,"adjustFontFallbacks":false,"adjustFontFallbacksWithSizeAdjust":false,"trustHostHeader":false},"configFileName":"next.config.js"},
  })
  handler = nextServer.getRequestHandler()
  console.log(
    'Listening on port',
    currentPort,
    'url: http://localhost:' + currentPort
  )
})
And as you can see, it starts an HTTP server, not an HTTPS one. This is also why the console.log("lksdfjls"); in your own server.js never gets executed.
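If you want to verify this yourself, you can exec into the running container and print the file, for example like this (the container name is a placeholder, and the /app path assumes the example Dockerfile's WORKDIR):

docker exec -it <your-container-name> cat /app/server.js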
What I would suggest is to leave Node as it is, listening on http://localhost:3000, and set up a reverse proxy that forwards incoming requests to this Node backend, which is reachable only from the reverse proxy. The reverse proxy would also handle TLS termination. A Docker Compose setup is more convenient for this: you can put the reverse proxy container (nginx, for example) in the same Compose project and mount a directory from the Docker host where your cert files are stored into the reverse proxy container at runtime. DO NOT BAKE CERTS OR ANY OTHER SECRETS INTO ANY IMAGE, not even an internally used one, because it could accidentally leak at any time.
You could also just run the two containers manually with docker run, but Compose makes life easier and has a lot of capabilities; for example, you can scale Compose services up and down so your backend service runs in many containers instead of one. If this is high-load and/or business-critical production stuff, though, you are better off with a real container orchestrator like Kubernetes, Docker Swarm, or Nomad; today the de facto container orchestrator is Kubernetes.
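For the simple two-container case, a minimal sketch of such a setup might look like the following; the service names, host paths, and port mappings are assumptions, and the Next.js container keeps serving plain HTTP on port 3000 internally:

# docker-compose.yml (sketch)
services:
  web:
    build: .
    expose:
      - "3000"                           # only reachable from other containers on this network
  proxy:
    image: nginx:alpine
    ports:
      - "443:443"
    volumes:
      - ./nginx.conf:/etc/nginx/conf.d/default.conf:ro
      - /srv/certs:/etc/nginx/certs:ro   # certs mounted at runtime, never baked into the image
    depends_on:
      - web

# nginx.conf (sketch): terminate TLS and forward to the Node backend
server {
    listen 443 ssl;
    ssl_certificate     /etc/nginx/certs/fullchain.pem;
    ssl_certificate_key /etc/nginx/certs/privkey.pem;

    location / {
        proxy_pass http://web:3000;        # "web" resolves via the Compose network
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}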
I am using Express and the Auth0 API for authentication, and React for the client side.
Because of the limitations of the Auth0 API (I spoke with their team), I am sending updated user details to my backend and then using app.set() so that I can use the req.body in another route.
I need the app.patch() route to be called automatically after the app.post() route has been hit.
The end goal is that the user's data will be updated and shown client-side.
const express = require('express');
const cors = require('cors');
const path = require('path');
const app = express();
require('dotenv').config()
const { auth } = require("express-openid-connect");

app.use(express.json());
app.use(cors());
app.use(express.static(path.join(__dirname, 'build')));

app.use(
  auth({
    issuerBaseURL: process.env.AUTH0_ISSUER_BASE_URL,
    baseURL: process.env.BASE_URL,
    clientID: process.env.AUTH0_CLIENT_ID,
    secret: process.env.SESSION_SECRET,
    authRequired: false,
    auth0Logout: true,
  })
);

app.get('/', async (req, res) => {
  res.sendFile(path.join(__dirname, 'build', 'index.html'));
});

app.get('/api', async (req, res) => {
  const stripe = require('stripe')(`${process.env.REACT_APP_Stripe_Live}`);
  const invoice = await stripe.invoices.list({
    limit: 3,
  });
  res.json(invoice);
});

app.post('/updateuser', (req, ) => {
  app.set('data', req.body);
})

app.patch(`https://${process.env.AUTH0_ISSUER_BASE_URL}/api/v2/users/:id`, (req, res) => {
  let val = app.get('data');
  req.params = {id: val.id};
  console.log(req.params);
})

app.listen(process.env.PORT || 8080, () => {
  console.log(`Server listening on 8080`);
});
I'd suggest you take the code inside app.patch() and turn it into a reusable function. Then it can be called from the app.patch() route directly or from your other route that wants the same functionality. Just decide on an interface for that function that works for both, make it a separate function, and then call it from both places.
For some reason (which I don't really understand, but it seems to happen to lots of people), people forget that the code inside a route can be put into functions and shared just like any other JavaScript code. I guess people tend to think of a route as a fixed unit by itself and forget that it can still be broken down into components and those components shared with other code.
A warning on another point: this comment of yours sounds very wrong:
and then using app.set() to be able to use the req.body in another route
req.body belongs to one particular user's request. app.set() is global to your server (all users' requests access it). So you're trying to store temporary state for one single user in what is essentially a global. That means multiple users' requests that happen to be doing something similar at the same time will trounce/overwrite each other's data. Or worse, one user's data will accidentally become another user's data. You cannot program a multi-user server this way at all.
The usual way around this is to either 1) redesign the process so you don't have to save state on the server (stateless operations are generally better, if possible), or 2) use a user-specific session (like with express-session) and save the temporary state in the user's session. Then it is saved separately for each user, and one user's state won't overwrite another's.
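If you do go the session route, a minimal sketch with express-session might look like this (the pendingUpdate field name is something I'm assuming, and the secret is reused from your existing .env):

const session = require('express-session');

app.use(
  session({
    secret: process.env.SESSION_SECRET, // assumption: reuse the secret you already keep in .env
    resave: false,
    saveUninitialized: false,
  })
);

app.post('/updateuser', (req, res) => {
  // Stored per user session instead of in an app-wide global
  req.session.pendingUpdate = req.body;
  res.sendStatus(204);
});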
If this usage of app.set() was meant to solve the original problem of executing the .patch() logic, then the problem is solved by just calling a shared function and passing the req.body data directly to it. Then you don't have to stuff it away somewhere for a later route to use; you just execute the functionality you want and pass it the desired data.
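As a rough sketch of that (the route paths, the helper name, and the Auth0 Management API call are placeholders, not your exact code):

// Hypothetical shared helper: both routes call this and receive the data explicitly.
async function applyUserUpdate(userId, changes) {
  // ... call the Auth0 Management API here and return the updated user ...
}

app.post('/updateuser', async (req, res) => {
  try {
    const updated = await applyUserUpdate(req.body.id, req.body); // no app.set() needed
    res.json(updated);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});

app.patch('/api/users/:id', async (req, res) => {
  try {
    const updated = await applyUserUpdate(req.params.id, req.body);
    res.json(updated);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});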
I'm struggling with serving a build created with "create-react-app" using Express with Helmet. I'm getting several errors in the browser console related to Content Security Policy:
[screenshot: Content Security Policy errors in the browser console]
Of course, it isn't showing the app. I noticed that if I remove Helmet as middleware in Express it works, but that's not the solution I want. This is my server code:
const express = require('express');
const helmet = require('helmet');
const cors = require('cors');
const morgan = require('morgan');
const bodyParser = require('body-parser');
/**
* Server Configuration
*/
const whitelist = [];
const app = express();
// Express Configurations
// Enable reverse proxy support in Express. This causes the "X-Forwarded-Proto" header field to be trusted, so its
// value can be used to determine the protocol. See http://expressjs.com/api#app-settings for more details.
app.enable('trust proxy');
app.use(morgan('dev')); // Log every request to the console
app.use(helmet()); // Configure secure Headers
app.use(bodyParser.urlencoded({ extended: false })); // Enable parsing of http request body
app.use(bodyParser.json());
// CORS Configuration
const corsOptions = {
  origin: (origin, callback) => {
    if (whitelist.indexOf(origin) !== -1 || !origin) {
      callback(null, true);
    } else {
      callback(new Error('Not allowed by CORS'));
    }
  },
};
app.use(cors(corsOptions)); // Allow CORS
/**
* Launcher method
*/
app.start = () => {
  // start node server
  const port = process.env.PORT || 3000;
  app.listen(port, () => {
    console.log(`App UI available http://localhost:${port}`);
    console.log(
      `Swagger UI available http://localhost:${port}/swagger/api-docs`,
    );
  });
};
/**
* App Initialization
*/
function initializeApp(readyCallback) {
  readyCallback(null, app);
}

module.exports = (readyCallback) => {
  initializeApp(readyCallback);
};
Can anyone give me a hand? Thanks in advance!
Helmet maintainer here.
This is happening because of something called Content Security Policy, which Helmet sets by default. To solve your problem, you will need to configure Helmet's CSP.
MDN has good documentation about CSP which I would recommend reading for background. After that, take a look at Helmet's README to see how to configure its CSP component.
To give some help specific to this question, let's take a look at one error you're seeing:
Content Security Policy: This page's settings blocked the loading of a resource at inline ("script-src").
This error is telling you that the script-src directive of your CSP does not allow inline JavaScript, and so it was blocked.
This is considered "inline" JavaScript:
<script>console.log('hello world!')</script>
This, however, is not:
<script src="/foo.js"></script>
There are several ways to fix this:
Add a hash or nonce to the inline <script> and use that in your CSP. See this example on MDN for help. (A rough sketch of this approach also appears after the other examples below.)
Refactor your app to avoid inline scripts entirely.
Update your CSP to allow unsafe inline scripts. You'd do something like this:
app.use(
  helmet({
    contentSecurityPolicy: {
      directives: {
        ...helmet.contentSecurityPolicy.getDefaultDirectives(),
        "script-src": ["'self'", "'unsafe-inline'", "example.com"],
      },
    },
  })
);
Note that this is considered unsafe.
Disable CSP. This is the most dangerous option so I don't recommend it.
app.use(
  helmet({
    contentSecurityPolicy: false,
  })
);
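Going back to the first option (hash/nonce): here is a rough sketch of how a per-request nonce could be wired up. Helmet accepts functions as directive values, and res.locals.cspNonce is just a name I'm assuming you would also reference from your HTML template:

const crypto = require("crypto");

// Generate a fresh nonce on every request and expose it to the view layer.
app.use((req, res, next) => {
  res.locals.cspNonce = crypto.randomBytes(16).toString("base64");
  next();
});

app.use(
  helmet({
    contentSecurityPolicy: {
      directives: {
        ...helmet.contentSecurityPolicy.getDefaultDirectives(),
        "script-src": ["'self'", (req, res) => `'nonce-${res.locals.cspNonce}'`],
      },
    },
  })
);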
Your other errors, such as the fonts.googleapis.com error, refer to default-src, which is the fallback if a directive is not specified.
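For example, if the blocked resource is the Google Fonts stylesheet, you could declare explicit style-src and font-src directives instead of relying on the default-src fallback (these hostnames are the usual Google Fonts origins; adjust them to whatever your console errors actually name):

app.use(
  helmet({
    contentSecurityPolicy: {
      directives: {
        ...helmet.contentSecurityPolicy.getDefaultDirectives(),
        "style-src": ["'self'", "'unsafe-inline'", "https://fonts.googleapis.com"],
        "font-src": ["'self'", "https://fonts.gstatic.com"],
      },
    },
  })
);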
In summary: to solve your problem, you will need to tell Helmet to configure your CSP.
Got here via Google with the same question. I didn't want to lower any of the security settings in Helmet, so I changed my React build config instead. Simply add the line
INLINE_RUNTIME_CHUNK=false
to the .env file in the React app's root directory. Then when you run npm run build to build the app, all inlined scripts will be removed and will no longer violate the CSP. This does add one extra initial HTTP GET request when first loading the site, but it seems worth the security benefit in my opinion.
Here is a third solution. Change your build script in package.json to the following:
"build": "GENERATE_SOURCEMAP=false node scripts/build.js"
I am uploading a zip file to AWS Lambda, and it contains the following two files:
// server.js
'use strict';
const express = require('express');
const bodyParser = require('body-parser');
const addRequestId = require('express-request-id')();
const app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.use(addRequestId);
app.listen(8000);
exports.app = 'app';
and
// main entry point for the handler
'use strict';
const server = require('./server').app;
exports.handler = function(event, context, callback) {
  // Do stuff
}
As you can see, I'd like to create an internal server that I can use inside my lambda. This times out, and I get an error message:
{"errorMessage":"2017-03-09T07:52:01.439Z 45210dff-049d-11e7-84cc-8367ae894495 Task timed out after 3.00 seconds"}
The problem happens right at app.listen(8000) (i.e., if I comment this line out, then the function runs).
Can I now create a server inside my lambda?
EDIT
Please do not comment on how a Lambda is intended for short, one-off operations. The question is not about the correct implementation and usage of Lambda; I know you are supposed to use API Gateway for routing. Of course this is not the full example. For the sake of this question, I need a server to run for the roughly 5 s it takes to execute this Lambda. Is this not allowed within AWS Lambda?
Taking the simple example from Union, I am wondering where I can put configuration code that usually goes in app.configure, like passport.js:
app.configure(function() {
  // Initialize Passport! Also use passport.session() middleware, to support
  // persistent login sessions (recommended).
  app.use(passport.initialize());
  app.use(passport.session());
});
Any ideas? server and router don't accept use().
Union appears to use the before collection for this:
var server = union.createServer({
  before: [
    connect.session({ secret: 'keyboard cat' }), // for `passport.session()`
    passport.initialize(),
    passport.session(),
    // etc.
  ]
});
From the "API" documentation:
#option before {Array}
The `before` value is an array of middlewares, which are used to route and serve incoming
requests. For instance, in the example, `favicon` is a middleware which handles requests
for `/favicon.ico`.
Union supports Connect middlewares via the before property, as mentioned previously by others. However, Union does not handle application configuration; Flatiron does. The API, however, is significantly different from Express's.
For example, configuring an application may look something like this:
var path = require('path'),
    flatiron = require('flatiron'),
    app = flatiron.app,
    plugins = flatiron.plugins,
    connect = require('connect'), // most connect middlewares work with flatiron ootb
    passport = require('passport');

// Use flatiron's http plugin (not the same as a middleware!)
app.use(plugins.http);

// configuration consists of key/value pairs, not of function blocks associated with
// certain "environments".
// Here's *a* way you can handle environment-based configs; there are others!
app.config.file(path.resolve(
  __dirname,
  'config',
  (process.env.NODE_ENV || 'config') + '.json'
));

// Use our config to set the secret
app.http.before.push(connect.session({
  secret: app.config.get('secret') || 'keyboard cat' // default
}));
app.http.before.push(passport.initialize());
app.http.before.push(passport.session());
I haven't tried running this example (I'm sure there are more details here) but hopefully this gives you an idea.
I just built a wrapper to integrate Passport.js with Flatiron.js.
https://npmjs.org/package/flatiron-passport
https://github.com/travist/flatiron-passport
Please read the README.md on how to use it and apply it to your application.
I have tested it on LocalStrategy, but it should work for other strategies.
Please let me know otherwise.