I create an Express app like this:
const express = require('express');
const path = require('path');
const app = express();

app.use(express.static(path.join(__dirname, 'public')));

app.post('/close', async (_, res) => {
  res.status(200);
  res.end();
  app.close();
});
module.exports = app;
I require and start it in another module:
const myApp = require('./app.js');

myApp.listen(port, () => {
  console.log(`Started server on ${port}`);
});
I want the server to shut itself down when it receives a POST request to /close. At the moment, I just get an app.close is not a function error.
I know I can close a server externally like this:
const server = myApp.listen(port, () => {
  console.log(`Started server on ${port}`);
});
server.close();
but I want to close the server on a POST request to /close. How can I do that?
To get access to your server object, try using
req.connection.server
from within your route handler.
.close() makes the server stop listening for new connections. Already-established connections are not affected. The server object emits a 'close' event when all open connections have disconnected.
process.exit() stops your whole Node.js app.
So, this code might do what you want.
app.post('/close', (req, res, next) => {
  res.status(200)
  res.end()
  // the Node http.Server that received this request
  const server = req.connection.server
  if (server.listening) {
    // http.Server is an EventEmitter, so use .on(), not addEventListener()
    server.on('close', () => {
      console.log('server closed. See ya later alligator.')
      process.exit(0)
    })
    console.log('closing server')
    server.close()
  }
});
If you need to get the server object from your app object (if getting it from your req object isn't good enough), you could put in a little middleware function like this.
app.use(function adornApp(req, res, next) {
  // stash the underlying http.Server on app.locals for later use
  req.app.locals.server = req.connection.server
  next()
})
Then you'll be able to get your server from app.locals.server once your middleware is first invoked.
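For example, once a request has passed through that middleware, a later handler can shut the server down via app.locals. A minimal sketch building on the middleware above (the /close route itself is just an illustration, not part of the answer):
app.post('/close', (req, res) => {
  res.sendStatus(200)
  const server = req.app.locals.server // set by adornApp above
  if (server && server.listening) {
    server.close(() => process.exit(0)) // callback fires once the server has fully closed
  }
})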
You could use the http-terminator package to close your server. The following should do the trick. The reason for using http-terminator is that a plain server.close() won't complete while connections are still open, which is the case when the shutdown is triggered from a route.
const { createHttpTerminator } = require("http-terminator");
const { initApp, app } = require("./app.js");

const PORT = 3000;

const server = app.listen(PORT, () => {
  console.log(`Started server on ${PORT}`);
});

const httpTerminator = createHttpTerminator({ server });
initApp(httpTerminator);
Inside the module:
const express = require("express");
const app = express();
const initApp = (httpTerminator) => {
app.get("/close", async (_, res) => {
res.json({ message: "we closed" });
await httpTerminator.terminate();
});
};
module.exports = { initApp, app };
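To try it out once the server is running, you could hit the route from a separate script. A quick sketch, assuming Node 18+ so the global fetch is available:
fetch("http://localhost:3000/close")
  .then((response) => response.json())
  .then((body) => console.log(body)); // { message: "we closed" }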
Related
I have set up Node.js to serve an HTTP server using Express. I also need to use the ws WebSocket library in order to connect to my device and fetch data.
This is my current attempt:
import express from 'express';
import cors from 'cors';
import http from 'http';
import { WebSocketServer } from 'ws';

const app = express();
app.use(cors());

// initialize a simple http server
const httpServer = http.createServer(app);
const wsServer = new WebSocketServer({ port: 7777 });

wsServer.on('connection', function connection(ws, req) {
  app.get('/', (req, res) => {
    ws.send('{"msg":"getdata"}');
    ws.on('message', function message(data) {
      data = JSON.parse(data);
      res.json(data);
    });
  });

  // start our server
  httpServer.listen(7778, () => {
    console.log(`Server started on port ${httpServer.address().port} :)`);
  });
});
The problem is that when the API is called more than once, an error with code 'ERR_HTTP_HEADERS_SENT' is thrown. I assume it is because the ws.on('message') handler is executed multiple times, so I am trying to find a way to remove the listener, but to no avail.
Is there a better way to do this? I just want a web server that calls out to another WebSocket in order to get data from a device.
For your code example to work, the WebSocket message must be sent after the / request is made, because before that the message handler is not registered. Also, once the first request has been handled successfully, you cannot send another WebSocket message: when you do, the res captured in the message handler has already been completed, so you get ERR_HTTP_HEADERS_SENT. Thus proved :-)
So, your API calls must follow this pattern:
/ Call #1
Websocket message #1
/ Call #2
Websocket message #2
If you do so, you will not get the ERR_HTTP_HEADERS_SENT error, because res is sent exactly once for every / request.
This will solve your 'ERR_HTTP_HEADERS_SENT' error:
wsServer.on('connection', function connection(ws, req) {
  app.get('/', (req, res) => {
    // first, register the message handler
    ws.on('message', function message(data) {
      data = JSON.parse(data);
      res.json(data);
    });
    // then send the request to the device
    ws.send('{"msg":"getdata"}');
  });
  // start our server
  httpServer.listen(7778, () => {
    console.log(`Server started on port ${httpServer.address().port} :)`);
  });
});
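A small variant of the sketch above, keeping the question's overall structure (which is itself an assumption), would use ws.once instead of ws.on so that each / request registers at most one message handler and a stale res is never reused:
wsServer.on('connection', function connection(ws) {
  app.get('/', (req, res) => {
    // once() removes the listener after the first message, so the next
    // request registers a fresh handler bound to its own res
    ws.once('message', function message(data) {
      res.json(JSON.parse(data));
    });
    ws.send('{"msg":"getdata"}');
  });
});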
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var timeout = require('connect-timeout');
var uuid = require('uuidv4');
var _ = require('lodash');
app.use(timeout('10s'));
app.get('/', (req, res) => {
  res.sendFile(__dirname + '/index.html');
});

let responses = []

io.on('connection', (socket) => {
  socket.on('res', (e) => {
    // find the pending HTTP response that matches this socket reply
    var obj = _.find(responses, r => r.id === e.id);
    if (obj) {
      obj.res.send(e)
      _.remove(responses, r => r.id === e.id);
    }
  })
})

app.get('/endpoint', (req, res) => {
  // tag the request with an id, broadcast it to connected sockets,
  // and park the res until a matching 'res' event comes back
  const id = uuid()
  io.emit('req', { id, ip: req.ip, header: req.headers, method: req.method });
  responses.push({ id, res })
});
http.listen(3000);
If you want this to work across multiple instances, you can use Redis pub/sub, e.g. via the Socket.IO Redis adapter.
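A minimal sketch of that setup, assuming Socket.IO v4 with the @socket.io/redis-adapter and redis v4 packages and a Redis instance on localhost (none of which appear in the code above):
const { createClient } = require('redis');
const { createAdapter } = require('@socket.io/redis-adapter');

const pubClient = createClient({ url: 'redis://localhost:6379' }); // assumed local Redis
const subClient = pubClient.duplicate();

Promise.all([pubClient.connect(), subClient.connect()]).then(() => {
  // io.emit() now publishes through Redis and reaches clients
  // connected to the other instances as well
  io.adapter(createAdapter(pubClient, subClient));
});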
In my Express server, when I click the upload button or the download button, I want the server to work with the file system (fs) without changing the web page. How can I implement this in JS?
let express = require('express');
let Fs = require('fs');
let path = require('path');
let URL = require('url');

let app = express();
let router = express.Router();
const port = 2020;

app.use(express.static('Programs'));
app.use('/', router);

router.get('/', (req, res) => {
  res.sendFile(path.join(__dirname + '/views/UiOfServer.html'));
});

router.get('/todo/download', (req, res) => {
  res.send("download page");
});

router.get('/todo/upload', (req, res) => {
  res.send("upload page");
});

app.listen(port, (err, res) => {
  if (err) {
    console.log(`Server Error: ${err}`);
  } else {
    console.log(`server started on ${port}`);
  }
})
If you don't want the current page in the browser to change or refresh when clicking a button, you should use AJAX.
Expose the upload and download endpoints as APIs and call those APIs from your web page.
You can use libraries such as jQuery, axios, or the built-in fetch to make your job easier.
Example in jQuery:
$.get('/todo/download').done(response => console.log(response))
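For comparison, the same call with the built-in fetch API, assuming the same /todo/download route as above:
fetch('/todo/download')
  .then((response) => response.text())
  .then((text) => console.log(text));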
Do you mean that you don't want the server to read and write continuously? If so, try this:
fs.readFileSync(your_data_url);
It should be near the top (maybe right after let router = express.Router();) because it is synchronous, and it only runs once, when the module is loaded.
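A small sketch of that idea; the ./data/todo.json path and the route body are hypothetical and only show the placement:
let router = express.Router();

// read the file once, synchronously, when the module is loaded
const todoData = Fs.readFileSync(path.join(__dirname, 'data', 'todo.json'), 'utf8');

router.get('/todo/download', (req, res) => {
  // serve the cached content without touching the file system again
  res.send(todoData);
});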
I'm using this code for my backend:
const express = require('express');
const app = express();
const socketIo = require("socket.io");
const io = socketIo(http);

io.on("connection", socket => {
  console.log("New client connected");
  socket.on("disconnect", () => console.log("Client disconnected"));
});

http.listen(3000, () => {
  console.log('listening on *:3000');
});
When I run it, it outputs the message confirming it is listening. However, on a connection it does not log any messages to the console. I'm trying to listen for connections from a React app. I have tried other code snippets for the connection handler that also claim to do what I expect, but none of them have worked, including the code in the official socket.io tutorial.
Can anyone please help?
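For reference, a minimal complete setup that does log connections. The explicit http server (which the snippet above never creates), socket.io v3+ for the cors option, and the React dev server origin are all assumptions on my part:
const express = require('express');
const http = require('http');
const socketIo = require('socket.io');

const app = express();
const httpServer = http.createServer(app); // socket.io needs an http.Server to attach to
const io = socketIo(httpServer, {
  cors: { origin: 'http://localhost:3001' } // assumed origin of the React dev server
});

io.on('connection', (socket) => {
  console.log('New client connected');
  socket.on('disconnect', () => console.log('Client disconnected'));
});

httpServer.listen(3000, () => {
  console.log('listening on *:3000');
});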
const express = require('express')
const app = express()
const PORT = 5000
// GET http://localhost:5000
app.get("/", (request, response) => {
  // Send back some data to the client
  response.send("Hello world")
})

// POST to http://localhost:5000/getRandom
app.post("/getRandom", (req, res) => {
  // res.send() expects a string, Buffer, or object, so convert the number
  res.send(Math.random().toString())
})
app.listen(PORT, () => console.log(`Server running on PORT ${PORT}`))
Instead of calling the parameters request and response, people usually use the short forms req and res.
Now start this script, go to http://localhost:5000, and you will see "Hello world" in the response body. That's Express: simple yet powerful :)
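To exercise the POST route you could run something like the following from the browser console or another script; the URL assumes the server above is running locally:
fetch('http://localhost:5000/getRandom', { method: 'POST' })
  .then((response) => response.text())
  .then((value) => console.log(value)); // a random number as text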
Sorry if I don't post the correct details; this is my first hands-on project after going through online tutorials.
I'm using React and Node with axios to build a web app that captures a status (available, in a meeting, lunch, etc.) and the time spent in each status.
The app works fine; it captures and writes the data to the backend (a JSON file). However, I keep getting this error in the console:
POST https://localhost:5000/write net::ERR_CONNECTION_RESET
Uncaught (in promise) ERROR: Network Error
I've tried to look for a solution but can't find one for a similar tech stack. My lack of knowledge doesn't help either.
Any lead, reading, or solution will help.
Pasting my code below.
My frontend code to push data into the JSON file:
const saveJson = (posts) => {
  // API URL / endpoint on the node/express server
  const url = "http://localhost:5000/write";
  axios.post(url, posts).then((response) => {
    //console.log(response);
  });
};
The server.js code
const express = require("express");
const bodyParser = require("body-parser");
//calling packages
const fs = require("fs");
const morgan = require("morgan");
const cors = require("cors");
//Declare app
const app = express();
const port = 5000;
//middlewares
app.use(bodyParser.json());
app.use(morgan("dev"));
app.use(cors());
//default route for server
app.get("/", (req, res) =>
res.status(200).send({
message: "Server is running...",
})
);
const WriteTextToFileAsync = async (contentToWrite) => {
fs.writeFile("./src/data.json", contentToWrite, (err) => {
console.log(contenToWrite);
if (err) {
console.log(err);
} else {
console.log("Done writing to file...");
// res.json({ msg: "success" });
}
});
};
//Declare timerow/write route to accept incoming require with data
app.post("/write", async (req, res, next) => {
//take the body from incoming requestby using req.body and conver it into string
const requestContent = JSON.stringify(req.body);
await WriteTextToFileAsync(requestContent);
});
//404 route for server
app.use((req, res, next) =>
res.status(404).send({
message: "Could not find specified route requested...!",
})
);
//run server
app.listen(port, () => {
console.log(
`!!! server is running
!!! Listening for incoming requests on port ${port}
!!! http://localhost:5000
`
);
});
I want a server-side generated page in Next.js to be served as a file, so I wanted to grab the rendered content inside a custom server.js file:
const express = require('express');
const next = require('next');

const port = parseInt(process.env.PORT, 10) || 3000;
const dev = process.env.NODE_ENV !== 'production';
const app = next({ dev });
const handle = app.getRequestHandler();

app.prepare().then(() => {
  const server = express();

  server.get('/', async (req, res) => {
    const nextResponse = await app.renderToHTML(req, res, '/', req.query);
    console.log('nextResponse', nextResponse);
    console.log('res.body', res.body);
  });

  server.get('*', (req, res) => {
    return handle(req, res);
  });

  server.listen(port, (err) => {
    if (err) throw err;
    console.log(`> Ready on http://localhost:${port}`);
  });
});
Oddly enough, every console.log prints null or undefined.
I thought that renderToHTML would just return the rendered HTML string. Is there any way to do this?
This one is a bit tricky but achievable.
The idea is to override the res.end function in order to catch the rendered HTML there. The tricky part is that Next.js gzips and streams the response using the compression library, which overrides res.end somewhere in the middle of the handle function.
The compression library is initialized by the handleCompression function of Next.js's Server object (accessible via app.getServer()), so that function needs to be overridden too.
So it should be looking something like this:
const { parse } = require('url');
const next = require('next');
const express = require('express');
const dev = process.env.NODE_ENV !== 'production';
const app = next({ dev });
const port = process.env.PORT || 3000;
const handle = app.getRequestHandler();
app.prepare()
  .then(() => {
    const server = express();

    server.get('*', async (req, res) => {
      const parsedUrl = parse(req.url, true);
      const nextServer = await app.getServer();
      // keep a reference to the original handleCompression, then wrap it
      const _handleCompression = nextServer.handleCompression.bind(nextServer);
      nextServer.handleCompression = (req, res) => {
        _handleCompression(req, res);
        // wrap res.end after compression has installed its own version,
        // so the payload passed to end() can be captured here
        const _resEnd = res.end.bind(res);
        res.end = function (payload) {
          console.log('Rendered HTML: ', payload);
          return _resEnd(payload);
        };
      };
      return handle(req, res, parsedUrl);
    });

    server.listen(port, err => {
      if (err) throw err;
      console.log('> Ready on http://localhost:' + port);
    });
  });
Once you have the rendered HTML you don't have to call the saved _resEnd function; feel free to manipulate the HTML, serve it as a file, or do whatever else you want with it.
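For instance, a sketch of the res.end override above that also dumps the payload to a file; the ./out.html path is just an illustration, and it assumes payload is the HTML string handed to res.end before compression kicks in:
const fs = require('fs');

res.end = function (payload) {
  if (typeof payload === 'string') {
    // keep a copy of the rendered page while still sending the response as usual
    fs.writeFileSync('./out.html', payload);
  }
  return _resEnd(payload);
};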