I'm creating a website using Next.js and Docker so that I can easily deploy it. I initialized it with npx create-next-app and used this Dockerfile (slightly modified) to containerize it. Since I wanted to use SSL with my server without going through the hassle of setting up a proxy, I followed this article and set up the custom server.
This worked fine when I ran it outside of a Docker container: it behaved as expected and served over HTTPS. However, once I containerized it and tried to open the page over HTTPS, I got SSL_ERROR_RX_RECORD_TOO_LONG, yet I could open the page over plain HTTP (which I could not do when running outside of a container). Some googling led me to this question, from which I concluded that outside of a container the custom server serves HTTPS as expected, but once containerized it starts serving HTTP, even though no code has changed.
I'd expect the behavior to be the same when running locally or containerized.
At first I assumed this was due to invalid key and cert values in httpsOptions, but I wasn't able to find anything that would make them invalid, and I don't see how that would cause this strange behavior. I also tried changing the base image from node:16-alpine to node:latest to see if it had something to do with the parent image, but that was fruitless.
One other minor issue: console.log doesn't seem to write anything to the container's logs for some reason. I tried googling this but didn't find much about it, and it has made debugging much harder because I can't output any debug data. The only log I get when running inside a container is Listening on port 3000 url: http://localhost:3000, which I assume comes from some library/package, as it isn't anywhere in my code.
Here is my custom server code in case it would be helpful:
const https = require('https');
const fs = require('fs');
const { parse } = require('url');
const next = require('next');

const dev = process.env.NODE_ENV !== 'production';
const hostname = "127.0.0.1";
const port = process.env.PORT || 3000;
const app = next({ dev, hostname, port });
const handle = app.getRequestHandler();

const httpsOptions = {
  key: fs.readFileSync('./cert/privkey.pem'),
  cert: fs.readFileSync('./cert/fullchain.pem')
};

app.prepare().then(() => {
  https.createServer(httpsOptions, async (req, res) => { // When running on Docker this creates an HTTP server instead of HTTPS
    const parsedUrl = parse(req.url, true);
    const { pathname, query } = parsedUrl;
    await handle(req, res, parsedUrl);
  }).listen(port, (err) => {
    if (err) throw err;
    console.log(`Ready on https://localhost:${port}`);
  });
});
Link to a reproducible example here.
The thing is, based on your sample repo, that the server.js in the root of your repo gets overwritten in the image because of this line in the Dockerfile:
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
So the actual server.js running in the container is the one generated by the yarn build command, and it looks like this (you can exec into the container and see it for yourself):
const NextServer = require('next/dist/server/next-server').default
const http = require('http')
const path = require('path')
process.env.NODE_ENV = 'production'
process.chdir(__dirname)
// Make sure commands gracefully respect termination signals (e.g. from Docker)
// Allow the graceful termination to be manually configurable
if (!process.env.NEXT_MANUAL_SIG_HANDLE) {
process.on('SIGTERM', () => process.exit(0))
process.on('SIGINT', () => process.exit(0))
}
let handler
const server = http.createServer(async (req, res) => {
try {
await handler(req, res)
} catch (err) {
console.error(err);
res.statusCode = 500
res.end('internal server error')
}
})
const currentPort = parseInt(process.env.PORT, 10) || 3000
server.listen(currentPort, (err) => {
if (err) {
console.error("Failed to start server", err)
process.exit(1)
}
const nextServer = new NextServer({
hostname: 'localhost',
port: currentPort,
dir: path.join(__dirname),
dev: false,
customServer: false,
conf: {"env":{},"webpack":null,"webpackDevMiddleware":null,"eslint":{"ignoreDuringBuilds":false},"typescript":{"ignoreBuildErrors":false,"tsconfigPath":"tsconfig.json"},"distDir":"./.next","cleanDistDir":true,"assetPrefix":"","configOrigin":"next.config.js","useFileSystemPublicRoutes":true,"generateEtags":true,"pageExtensions":["tsx","ts","jsx","js"],"target":"server","poweredByHeader":true,"compress":true,"analyticsId":"","images":{"deviceSizes":[640,750,828,1080,1200,1920,2048,3840],"imageSizes":[16,32,48,64,96,128,256,384],"path":"/_next/image","loader":"default","loaderFile":"","domains":[],"disableStaticImages":false,"minimumCacheTTL":60,"formats":["image/webp"],"dangerouslyAllowSVG":false,"contentSecurityPolicy":"script-src 'none'; frame-src 'none'; sandbox;","remotePatterns":[],"unoptimized":false},"devIndicators":{"buildActivity":true,"buildActivityPosition":"bottom-right"},"onDemandEntries":{"maxInactiveAge":15000,"pagesBufferLength":2},"amp":{"canonicalBase":""},"basePath":"","sassOptions":{},"trailingSlash":false,"i18n":{"locales":["en"],"defaultLocale":"en"},"productionBrowserSourceMaps":false,"optimizeFonts":true,"excludeDefaultMomentLocales":true,"serverRuntimeConfig":{},"publicRuntimeConfig":{},"reactStrictMode":true,"httpAgentOptions":{"keepAlive":true},"outputFileTracing":true,"staticPageGenerationTimeout":60,"swcMinify":true,"output":"standalone","experimental":{"middlewarePrefetch":"flexible","optimisticClientCache":true,"manualClientBasePath":false,"legacyBrowsers":false,"newNextLinkBehavior":true,"cpus":7,"sharedPool":true,"profiling":false,"isrFlushToDisk":true,"workerThreads":false,"pageEnv":false,"optimizeCss":false,"nextScriptWorkers":false,"scrollRestoration":false,"externalDir":false,"disableOptimizedLoading":false,"gzipSize":true,"swcFileReading":true,"craCompat":false,"esmExternals":true,"appDir":false,"isrMemoryCacheSize":52428800,"fullySpecified":false,"outputFileTracingRoot":"","swcTraceProfiling":false,"forceSwcTransforms":false,"largePageDataBytes":128000,"enableUndici":false,"adjustFontFallbacks":false,"adjustFontFallbacksWithSizeAdjust":false,"trustHostHeader":false},"configFileName":"next.config.js"},
})
handler = nextServer.getRequestHandler()
console.log(
'Listening on port',
currentPort,
'url: http://localhost:' + currentPort
)
})
And as you can see, it starts an HTTP server, not an HTTPS one. This is also why the console.log("lksdfjls"); in your own server.js never gets executed.
What I would suggest is to leave Node as it is, listening on http://localhost:3000, and set up a reverse proxy that forwards incoming requests to this Node backend, with the backend reachable only from the reverse proxy. The reverse proxy then handles TLS termination. A Docker Compose setup is more convenient for this: you can put the reverse proxy container (nginx, for example) in the compose project too and mount a directory from the Docker host where your cert files are stored into the reverse proxy container at runtime. DO NOT BAKE CERTS OR ANY OTHER SECRETS INTO ANY IMAGE, not even an internally used one, because it could leak out accidentally at any time.
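For example, a minimal sketch of such a setup (the service names, the ./certs directory and the nginx.conf path are my assumptions, not taken from your repo):

# docker-compose.yml
services:
  web:
    build: .                # your existing Next.js image, still speaking plain HTTP on port 3000
    expose:
      - "3000"              # reachable only from other containers on the compose network
  proxy:
    image: nginx:alpine
    depends_on:
      - web
    ports:
      - "443:443"
    volumes:
      - ./nginx.conf:/etc/nginx/conf.d/default.conf:ro
      - ./certs:/etc/nginx/certs:ro   # certs mounted from the host at runtime, never baked into an image

# nginx.conf
server {
    listen 443 ssl;
    ssl_certificate     /etc/nginx/certs/fullchain.pem;
    ssl_certificate_key /etc/nginx/certs/privkey.pem;

    location / {
        proxy_pass http://web:3000;               # "web" resolves to the Next.js container
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-Proto https;
    }
}

With this, the generated standalone server.js can keep serving plain HTTP and nginx terminates TLS in front of it.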
You could also just run the two containers manually with docker run, but Compose makes life easier and has a lot of capabilities; for example, you can scale compose services up and down so your backend runs in many containers instead of one. If this is high-load and/or business-critical production stuff, though, you are better off with a real container orchestrator such as Kubernetes, Docker Swarm, or Nomad; today the de facto container orchestrator is Kubernetes.
I must say I'm very new to back-end development.
I'm currently working on an exercise project: a fake-money poker website. I use Node.js with socket.io, express-session, and passport.
At first I mainly used Express with an HTTP server listening on one port, roughly like this:
const express = require("express")
const app = express()

app.get('/home', connectEnsureLogin.ensureLoggedIn("/loginPage"), function(req, res) {
  //console.log(req.user.username+": sessionId: "+req.sessionID);
  return res.sendFile(__dirname + "/website/index.html");
});

const PORT = process.env.PORT || 5000;
app.listen(PORT, () => console.log(`Poker site Server started on ${PORT}`))
The website wasn't very fast. When a client joined a poker table they needed to ask the server every second for updates on the state of the game, so a lot of HTTP requests were hitting my server. So I decided, without much theoretical certitude, on what seemed like a good idea: have the server use socket.io sockets to hand info to clients that are at poker tables, and keep using the HTTP server to handle their requests when they are not at a table and are just browsing the site. Code-wise I don't feel I've managed to do this correctly. My Express, express-session, and passport code makes sure information is only handed to authenticated users, but since the socket.io server seems totally separate from all the Express code, it doesn't share that authentication functionality. So I need to somehow link my Express and socket.io code so I can check that a client is authenticated before handing them any info via sockets. Here is the system I'm currently using; I didn't include all my code, but I tried to summarize the essential parts:
const express = require('express');
const app = express();
// I create the http server that is linked with my express app; when this server is listening
// it will call the express handling methods.
const http = require('http').Server(app);
const io = require('socket.io')(http);
const path = require("path");
const passport = require("passport");
const connectEnsureLogin = require('connect-ensure-login');
const AccountInfo = require("./AccountInfo").AcccountInfo;

const expressSession = require('express-session')({
  secret: process.env.SESSION_SECRET,
  resave: false,
  saveUninitialized: false
});

// passport setup
passport.use(AccountInfo.createStrategy());
passport.serializeUser(AccountInfo.serializeUser());
passport.deserializeUser(AccountInfo.deserializeUser());

// body parser
app.use(express.json());
app.use(express.urlencoded({ extended: true }));

// sessions
app.use(expressSession);

// !!!! here is where I connect socket.io with the sessions; I found this on another forum.
// Thanks to this code I can access the session a client is using when their socket connects.
io.use(function(socket, next) {
  expressSession(socket.request, socket.request.res, next);
});

// so when a client's socket connects I save their socket.id to their session
io.on('connection', function(socket) {
  console.log(`socket.io connected: ${socket.id}`);
  // save the socket.io socket id in the session
  socket.request.session.socketio = socket.id;
  socket.request.session.save();
});

// once the client's socket is connected, the client immediately sends an HTTP "POST" request
// and this code answers it.
app.post('/Table/ConnectSocketToTable', Utilities.ensureLoggedIn(), function(req, res) {
  // I retrieve the socket using the socket.id I saved in the session.
  let socket = io.sockets.sockets.get(req.session.socketio);
  let player = GetPlayerFromAnyTable(req.user.username);
  if (player == null) { // the player can't be at two tables at once
    // Since we are now in an express callback, express has made sure the client is indeed
    // authenticated via the middleware "Utilities.ensureLoggedIn()", and just above I made sure
    // the client is not at another table. So we are good to go: we can link the socket to the table
    // and have the client receive all the info about the state of their table.
    socket.join("table-" + req.session.passport.table);
    req.user.socket = socket;
    let table = GetTable(req.session.passport.table);
    table.sitPlayer(req.user);
  }
  else {
    // the player is already connected, so we just update their socket to the new one
    player.requestUnseat = false;
    player.account.socket = io.sockets.sockets.get(req.session.socketio);
  }

  socket.on('chatMessage', function(data, time) {
    socket.to("table-" + req.session.passport.table).emit("chatMessage", req.user.username, data, time);
    console.log(`send chat message : ${data}`);
  });

  socket.on('disconnect', function() {
    GetTable(req.session.passport.table).requestUnsitUsername(req.user.username);
    console.log(req.user.username + " was disconnected so now requesting unsit");
  });

  console.log("the socket of " + req.user.username + " has been connected to table-" + req.session.passport.table);
  return res.sendStatus(200);
});
So the way I'm doing this seems pretty bad to me, since app.post('/Table/ConnectSocketToTable', ...) and io.on('connection', ...) are two separate request-listening functions, and I feel I should probably do everything in one place.
So should I do all the checks in the "io.on('connection',...)" function and somehow manage to make sure the client is authenticated within the callback of io.on('connection',callback) ?
or should I find a way to make the socket connection happen in the initial HTTP call the client uses to join a table, which is what I initially wanted?
But really I'm kind of lost, because I'm telling myself maybe I don't even need Express anymore and should just use socket.io for everything. I clearly lack the general understanding that would let me know which approach to go for, so any help is welcome. I started this self-made exercise to get into server-side development, but if there is any other recommended exercise for starting out with back-end development, I'm definitely interested in hearing about it.
From random testing I found out how to authenticate my express session from the socket code. You don't actually have to do it in the callback of io.on('connection', callback); you just need to add a few more middleware functions, like this:
// connecting express sessions
io.use(function(socket, next) {
  expressSession(socket.request, socket.request.res, next);
});

// connecting passport
io.use(function(socket, next) {
  passport.initialize()(socket.request, socket.request.res, next);
});

// connecting passport sessions
io.use(function(socket, next) {
  passport.session()(socket.request, socket.request.res, next);
});

// check if the client is authenticated; returns an error if authentication failed
io.use((socket, next) => {
  console.log("started socket connection");
  if (socket.request.isAuthenticated && socket.request.isAuthenticated()) {
    socket.request.session.socketio = socket.id;
    socket.request.session.save();
    console.log("table " + socket.request.session.passport.table);
    console.log("user.username " + socket.request.user.username);
    console.log("is authenticated");
    next();
  }
  else {
    console.log("failed socket connection");
    next(new Error("unauthorized"));
  }
});
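With that middleware chain in place, the connection handler can rely on the request already being authenticated. A rough sketch of how it could then look (it reuses the fields from the code above, e.g. session.passport.table; whether you join the table room here or keep doing it in the express route is up to you):

io.on('connection', function(socket) {
  // the io.use() chain above has already rejected unauthenticated sockets
  const username = socket.request.user.username;
  const table = socket.request.session.passport.table;
  console.log(`socket ${socket.id} authenticated as ${username}`);
  // optionally join the table room right away instead of waiting for the HTTP call
  socket.join("table-" + table);
});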
I'm using both Ubuntu and Visual Studio Code to launch my server program, and a few days ago both were successfully taking requests from Postman and sending back replies. The server code runs fine and says the server is up and running at http://localhost:8080.
But when I try to send a GET request from Postman, I get this error:
This is the environment I'm using:
And this is the error I get from my server program when it gets a request:
How the server is configured:
require('dotenv').config()
const express = require("express");
const bodyParser = require("body-parser");

const app = express();
app.set("port", 8080);
app.use(bodyParser.json({ type: "application/json" }));
app.use(bodyParser.urlencoded({ extended: true }));

const Pool = require("pg").Pool;
const config = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASS,
  database: "taskfour"
};
const pool = new Pool(config);

// HELLO WORLD
app.get("/hello", (req, res) => {
  res.json({ msg: "Hello, World!" });
});

app.listen(app.get("port"), () => {
  console.log(`Find the server at http://localhost:${app.get("port")}`);
});
The server had previously been working fine: Postman was sending requests, running tests, and my code was passing them. I didn't change much in the meantime, so I'm not sure what happened. I've tried turning off my proxy server in Postman, but it hasn't helped. Any help would be greatly appreciated.
Looks like the HTTP listening code is missing, for example:
app.listen(8080, function () {
  console.log('App listening on port 8080.');
});
Wow, I didn't realize the postgres service wasn't running. I just needed to enter the command "sudo service postgresql start" in my terminal, and the requests work again.
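If it helps anyone else: one way to make that failure obvious at startup is to run a trivial query as soon as the pool is created. A small sketch reusing the pool from the code above (the log messages are just illustrative):

// sanity-check the database connection on startup so a stopped Postgres service fails loudly
pool.query("SELECT 1")
  .then(() => console.log("Connected to Postgres"))
  .catch(err => {
    console.error("Could not reach Postgres - is the postgresql service running?", err.message);
    process.exit(1);
  });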
Sorry for this noob question, student here and still learning
I'm trying to pass the request body of a POST request from server to client. I have an Arduino sensor making post requests with sensor data to an express server. The sensor data is inside the POST request body, and I push the data to an array called 'dataArray'. This part seems to be working.
My problem is that I'm now stuck on how to pass this data from the express server to a Vue component on the client side. Should I make a new route? I'm not asking anyone to write any code for me, I'm just hoping someone could point me in the right direction or suggest something, because I'm at a loss on exactly how I should go about doing this. Thank you.
server.js
var express = require("express")
var cors = require("cors")
var bodyParser = require("body-parser")
var app = express()
var mongoose = require("mongoose")
var Users = require("./routes/Users")

var port = process.env.PORT || 5000
var dataArray = []

app.use(bodyParser.json())
app.use(cors())
app.use(bodyParser.urlencoded({ extended: true }))

const mongoURI = 'my_connection_string'
mongoose.connect(mongoURI, { useNewUrlParser: true })
  .then(() => console.log("MongoDB Connected"))
  .catch(err => console.log(err))

app.use("/users", Users)

app.route("/api/:apikey1")
app.post("/api/:apikey1", function(request, response) {
  var myData = request.body;
  console.log(myData)
  dataArray.push(myData)
  response.send("Array Filled")
});

app.listen(port, function () {
  console.log("Server is running on port: " + port)
})
If you want to keep it simple, use the socket.io library.
It is available for both client and server, so you can use it in your Vue client too.
Also, you won't need any extra route: in your existing app.post("/api/:apikey1") route, use socket.io's emit method to broadcast the data as it is received from the sensors, as sketched below.
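A rough sketch of that idea, reusing the names from your server.js (the http wrapper and the 'sensorData' event name are assumptions for illustration; depending on your socket.io version you may also need to pass CORS options):

var http = require("http").Server(app)   // wrap the existing express app
var io = require("socket.io")(http)

app.post("/api/:apikey1", function (request, response) {
  var myData = request.body
  dataArray.push(myData)
  io.emit("sensorData", myData)          // push the new reading to every connected client
  response.send("Array Filled")
})

// listen on the http server instead of app.listen so socket.io shares the same port
http.listen(port, function () {
  console.log("Server is running on port: " + port)
})

On the Vue side you would connect with socket.io-client and handle the 'sensorData' event, e.g. socket.on("sensorData", data => { /* update component state */ }).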
Any data flow from the server to the client must first be initiated on the client side, whether by polling, by WebSockets, or by Server-Sent Events.
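For completeness, a minimal server-sent-events sketch on top of the existing express app (the /api/stream route name and the one-second interval are just illustrative):

// GET /api/stream - keep the response open and push the latest reading as an event stream
app.get("/api/stream", function (request, response) {
  response.set({
    "Content-Type": "text/event-stream",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive"
  });
  response.flushHeaders();
  const timer = setInterval(function () {
    const latest = dataArray[dataArray.length - 1] || null;
    response.write("data: " + JSON.stringify(latest) + "\n\n");
  }, 1000);
  request.on("close", function () { clearInterval(timer); });
});

// client side: new EventSource("http://localhost:5000/api/stream") and read e.data in onmessage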
I am working on an exercise tracker app using the MERN stack. I have a React component that is meant to let me add a new user to a database after I press the submit button. I am using axios to send HTTP requests from my front end to a server endpoint on the back end. However, I keep getting this error:
POST https://localhost:5000/users/add net::ERR_CONNECTION_REFUSED
Uncaught (in promise) Error: Network Error at createError
(0.chunk.js:971) at XMLHttpRequest.handleError (0.chunk.js:466)
This is my server side code
const express = require('express');
const cors = require('cors');
const mongoose = require('mongoose');
// mongoose is what's going to help us connect to our mongoDB database
require('dotenv').config();
// this configures things so we can have our environment variables in the dotenv file

const app = express();
const port = process.env.PORT || 5000;
// this is how we will create our express server

app.use(cors());
app.use(express.json());
// this is our middleware; it will allow us to parse json
// because the server will be sending and receiving json

const uri = process.env.ATLAS_URI;
mongoose.connect(uri, { useNewUrlParser: true, useCreateIndex: true, useUnifiedTopology: true });
// uri is where the database is stored

const connection = mongoose.connection;
connection.once('open', () => {
  console.log("MongoDB database connection established successfully");
});
// once the connection is open it's going to log the message

const exercisesRouter = require('./routes/excercises');
const usersRouter = require('./routes/users');
// importing

app.use('/excercises', exercisesRouter);
app.use('/users', usersRouter);
// use the route files
// whenever somebody goes to the root url and puts /excercises at the end it will show
// everything in exercises, and the same for users

app.listen(port, () => {
  console.log('Server is running on port: ' + port);
});
// this is what starts the server; it starts listening on a certain port
This is my submit function
onSubmit(e) {
  e.preventDefault(); // prevents the default html form behaviour from taking place
  const user = {
    username: this.state.username,
  };
  console.log(user);
  // sending user data to the backend with a post request
  // check the user.js file in routes; it's sending a post request to the user add api
  axios.post('https://localhost:5000/users/add', user)
    .then(res => console.log(res.data));
  this.setState({
    username: ''
  });
}
This is my route
router.route('/add').post((req, res) => {
  const username = req.body.username;
  const newUser = new User({ username });
  // using the username to create a new user
  newUser.save()
    .then(() => res.json('User added'))                   // after the user is saved to the DB, return a "User added" message
    .catch(err => res.status(400).json('Error ' + err));  // if there is an error, return the error
});
Check if your backend is also running on port 5000; you need to start your backend.
I followed this tutorial as well; you have to start both the back end and the front end. The problem was that only the front end was running, which is why you could see everything (not sure how he managed that), but I had to pull up a terminal and start the front end with npm start, and the back end with nodemon server in a separate terminal tab.
Bro, I think you just made a small mistake in your address: you have https://localhost:5000/users/add. Change the https to http and it will solve your problem.
Your address should be http://localhost:5000/users/add.
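In other words, the axios call in onSubmit becomes:

axios.post('http://localhost:5000/users/add', user)
  .then(res => console.log(res.data));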