Can you tell me how to configure nginx to prerender my app?
The tutorial doesn't work: when I crawl the website as Googlebot, I still see the Angular template brackets.
I'm lost.
Many thanks in advance.
My nginx config:
server {
    listen 80;
    listen [::]:80 default_server ipv6only=on;
    return 301 https://$host$request_uri;
}
server {
    listen 443;
    server_name www.thedomain.com;
    ssl on;
    # Use certificate and key provided by Let's Encrypt:
    ssl_certificate /etc/letsencrypt/live/www.thedomain.com/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/www.thedomain.com/privkey.pem;
    ssl_session_timeout 5m;
    ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
    ssl_prefer_server_ciphers on;
    ssl_ciphers 'EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH';
    location / {
        # Proxy_pass configuration
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $http_host;
        proxy_set_header X-NginX-Proxy true;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_max_temp_file_size 0;
        proxy_pass http://0.0.0.0:3000;
        proxy_redirect off;
        proxy_read_timeout 240s;
    }
}
This should work:
server {
    listen 80;
    listen [::]:80 default_server ipv6only=on;
    return 301 https://$host$request_uri;
}
server {
    listen 443;
    server_name www.thedomain.com;
    ssl on;
    # Use certificate and key provided by Let's Encrypt:
    ssl_certificate /etc/letsencrypt/live/www.thedomain.com/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/www.thedomain.com/privkey.pem;
    ssl_session_timeout 5m;
    ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
    ssl_prefer_server_ciphers on;
    ssl_ciphers 'EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH';
    location / {
        #proxy_set_header X-Prerender-Token YOUR_TOKEN;
        set $prerender 0;
        if ($http_user_agent ~* "baiduspider|twitterbot|facebookexternalhit|rogerbot|linkedinbot|embedly|quora link preview|showyoubot|outbrain|pinterest|slackbot|vkShare|W3C_Validator") {
            set $prerender 1;
        }
        if ($args ~ "_escaped_fragment_") {
            set $prerender 1;
        }
        if ($http_user_agent ~ "Prerender") {
            set $prerender 0;
        }
        if ($uri ~* "\.(js|css|xml|less|png|jpg|jpeg|gif|pdf|doc|txt|ico|rss|zip|mp3|rar|exe|wmv|doc|avi|ppt|mpg|mpeg|tif|wav|mov|psd|ai|xls|mp4|m4a|swf|dat|dmg|iso|flv|m4v|torrent|ttf|woff|svg|eot)") {
            set $prerender 0;
        }
        # resolve using Google's DNS server to force DNS resolution and prevent caching of IPs
        resolver 8.8.8.8;
        if ($prerender = 1) {
            # setting prerender as a variable forces DNS resolution since nginx caches IPs and doesn't play well with load balancing
            set $prerender "service.prerender.io";
            rewrite .* /$scheme://$host$request_uri? break;
            proxy_pass http://$prerender;
        }
        # Proxy_pass configuration
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $http_host;
        proxy_set_header X-NginX-Proxy true;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_max_temp_file_size 0;
        proxy_pass http://0.0.0.0:3000;
        proxy_redirect off;
        proxy_read_timeout 240s;
    }
}
Don't forget to remove the # from the proxy_set_header for X-Prerender-Token once you replace YOUR_TOKEN.
You will then want to test your URL by appending the ?_escaped_fragment_= query parameter. If your URLs look like:
https://www.example.com/
You'll test them like:
https://www.example.com/?_escaped_fragment_=
That will force a prerendered page so you can View Source there and make sure you're seeing a static HTML page instead of just the unrendered Angular tags.
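If you prefer checking from the command line, here is a minimal Node sketch (assuming Node 18+ with the built-in fetch; the URL is a placeholder) that requests the page normally, with the ?_escaped_fragment_= parameter, and with one of the crawler user agents matched by the config above, then reports whether unrendered Angular bindings are still present:

// check-prerender.js - rough sketch, point the URL at your own site
const url = 'https://www.example.com/';

async function check(label, target, headers = {}) {
    const res = await fetch(target, { headers });
    const html = await res.text();
    // Unrendered Angular templates usually still contain {{ ... }} bindings
    const hasBindings = /{{.*}}/.test(html);
    console.log(`${label}: status ${res.status}, Angular bindings present: ${hasBindings}`);
}

(async () => {
    await check('normal request', url);
    await check('escaped fragment', url + '?_escaped_fragment_=');
    // twitterbot is one of the user agents in the config's regex
    await check('bot user agent', url, { 'User-Agent': 'twitterbot' });
})();

Run it with node check-prerender.js; once prerendering works, the last two checks should come back without bindings.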
Express.js version 4.17
My setup:
proxy (nginx) -> Express.js server (localhost:3000)
My URL is: https://example.com/server/xy
The proxy sends this to the Express.js server as: localhost:3000/xy
server.get('/data.js', async (req, res, next) => {
    console.log("req2", req.host, req.port, req.originalUrl, req.get('host'), req.path);
    res.json({ "done": "1" });
})
This results in the following log message:
req2 example.com undefined /data.js example.com:443 /data.js
nginx conf:
http {
    proxy_set_header Upgrade $http_upgrade;
    #proxy_set_header Connection $connection_upgrade;
    proxy_set_header Host $host:$server_port;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Host $host;
    proxy_set_header X-Forwarded-Port $server_port;
    proxy_set_header X-Forwarded-Proto https;
    proxy_set_header X-Forwarded-Ssl on;
    server {
        set $server http://localhost:3000;
        location /server {
            rewrite ^/server/(.*) /$1 break;
            proxy_pass $server;
            proxy_redirect $server https://$host:$server_port/server/;
        }
    }
Any idea how to get the base path that seems to be lost between the proxy and Express.js?
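For reference, the rewrite ^/server/(.*) /$1 break; line strips the /server prefix before proxy_pass, so Express never sees it. A minimal sketch of one way to carry it along, assuming you add a custom header such as X-Forwarded-Prefix in the location /server block (the header name is an illustrative choice, nothing nginx sets on its own):

// Sketch only: assumes nginx also contains
//     proxy_set_header X-Forwarded-Prefix /server;
// inside "location /server { ... }".
const express = require('express');
const server = express();

server.get('/data.js', (req, res) => {
    // The prefix stripped by the nginx rewrite, if the header was set
    const prefix = req.get('X-Forwarded-Prefix') || '';
    // Reconstruct the path as the client originally requested it
    const originalPath = prefix + req.path;
    console.log('client-facing path:', originalPath); // e.g. /server/data.js
    res.json({ done: '1' });
});

server.listen(3000);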
Audio files on prod are not working, while they work fine on the dev environment (Angular 7).
Prod config (VPS):
Ubuntu 18
Nginx
Let's encrypt
AudioService:
export class AudioService {
    audio = new Audio();

    constructor() { }

    isPlaying() {
        return this.audio.currentTime > 0 && !this.audio.paused && !this.audio.ended && this.audio.readyState > 2;
    }

    play(name: string): void {
        this.audio.src = `assets/audio/${name}`;
        this.audio.crossOrigin = 'anonymous';
        this.audio.load();
        if (!this.isPlaying()) {
            this.audio.play();
        }
    }

    pause(): void {
        if (this.isPlaying()) {
            this.audio.pause();
        }
    }
}
CORS is enabled on the Node.js side (using NestJS). main.ts:
app.enableCors();
Chrome log:
Uncaught (in promise) DOMException: Failed to load because no
supported source was found.
Firefox log:
NotSupportedError: The media resource indicated by the src attribute
or assigned media provider object was not suitable.
Looking at the Network console, we can see myaudio.wav with:
Status Code: 206 Partial Content
Note: loading images works fine!
EDIT:
Nginx config /etc/nginx/sites-available/mywebsite:
# Redirection
server {
    # if ($host = mywebsite.com) {
    #     return 301 https://$host$request_uri;
    # } # managed by Certbot
    listen 80;
    listen [::]:80;
    server_name mywebsite.com www.mywebsite.com;
    return 301 https://$host$request_uri;
    #return 404; # managed by Certbot
}
# Config
server {
    server_name mywebsite.com www.mywebsite.com;
    root /home/foo/mywebsite/gui;
    index index.html index.htm;
    location / {
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $http_host;
        proxy_set_header X-NginX-Proxy true;
        proxy_pass http://my.ip:3000/;
        # Websocket
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
    }
    if ($host = 'www.mywebsite.com') {
        return 301 https://mywebsite.com$request_uri;
    }
    listen [::]:443 ssl ipv6only=on; # managed by Certbot
    listen 443 ssl; # managed by Certbot
    ssl_certificate /etc/letsencrypt/live/mywebsite.com/fullchain.pem; # managed by Certbot
    ssl_certificate_key /etc/letsencrypt/live/mywebsite.com/privkey.pem; # managed by Certbot
}
On the dev environment, localhost:4200/assets/audio/myaudio.wav → works fine
On the prod environment, https://mywebsite.com/assets/audio/myaudio.wav → returns the home page
While https://mywebsite.com/assets/image.jpg → works fine
Only audio files don't work.
Set max_ranges to 0.
For your case, this would look something like this:
location ~ \.wav$ {
    max_ranges 0;
}
This means the rule applies to every .wav file regardless of its location.
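To confirm the directive is active, a quick Node sketch (assuming Node 18+ with the built-in fetch; the URL is a placeholder for your real audio file) can send a ranged request and report what comes back; with max_ranges 0 you should see 200 with the full file instead of 206 Partial Content:

// check-ranges.js - rough sketch; replace the URL with your real audio file
const url = 'https://mywebsite.com/assets/audio/myaudio.wav';

(async () => {
    // Ask for the first two bytes only; a server honouring ranges replies 206
    const res = await fetch(url, { headers: { Range: 'bytes=0-1' } });
    console.log('status:', res.status);  // 206 = ranges honoured, 200 = ranges ignored
    console.log('content-type:', res.headers.get('content-type'));  // should be audio/*, not text/html
    console.log('content-length:', res.headers.get('content-length'));
})();

The content-type line is worth watching too, since the question mentions the prod URL returning the home page rather than the audio file.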
I have a Node application running on a server with Apache and Nginx as a reverse proxy.
A Node REST API is also running on the same server.
The JavaScript code looks as follows:
api.js
// Express
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
// Express App
const app = express();
// Env
const PORT = process.env.PORT || 3000;
const NODE_ENV = process.env.NODE_ENV || 'development';
// Config
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.use(cors());
// Routes
const users = require('./routes/users');
// Angular Http content type for POST etc. defaults to text/plain, so parse the text body as JSON
app.use(bodyParser.text(), function ngHttpFix(req, res, next) {
    try {
        req.body = JSON.parse(req.body);
        next();
    } catch (e) {
        next();
    }
});
app.use('/api', users);
app.listen(PORT, function() {
    console.log('Listen on http://localhost:' + PORT + ' in ' + NODE_ENV);
});
/routes/users.js
var models = require('../models');
var express = require('express');
var router = express.Router();
// get all users
router.get('/users', function(req, res) {
    models.Beekeeper.findAll({}).then(function(users) {
        res.json(users);
    });
});
module.exports = router;
The Nginx configuration looks as follows:
index index.html index.htm;
upstream api {
    server 127.0.0.1:3000;
}
server {
    listen 80;
    server_name example.com;
    return 301 https://$server_name$request_uri;
}
server {
    listen 443;
    root /var/www;
    ssl on;
    ssl_prefer_server_ciphers On;
    ssl_protocols TLSv1.2;
    ssl_session_cache shared:SSL:10m;
    ssl_session_timeout 10m;
    ssl_ciphers AES256+EECDH:AES256+EDH:!aNULL;
    add_header Strict-Transport-Security "max-age=63072000; includeSubdomains; preload";
    add_header X-Frame-Options DENY;
    add_header X-Content-Type-Options nosniff;
    ssl_dhparam /etc/nginx/ssl/dhparam.pem;
    ssl_certificate /etc/letsencrypt/live/example.com/cert.pem;
    ssl_certificate_key /etc/letsencrypt/live/example.com/privkey.pem;
    server_name example.com;
    location / {
        proxy_pass http://127.0.0.1:8000;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Host $host;
        proxy_set_header X-Real-Ip $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
    location /api {
        proxy_pass http://api;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Host $host;
        proxy_set_header X-Real-Ip $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        rewrite ^/api/?(.*) /$1 break;
        proxy_redirect off;
    }
}
The problem is that if I make an API call on my development server, for example localhost:3000/api/users, it works as expected.
However, if I make the same API call on my production server, for example https://example.com/api/users, I get Cannot GET /users and a 404 NOT FOUND.
I suppose there is something wrong with my Nginx configuration, but although I have already read numerous other posts about similar problems here on Stack Overflow, I could not solve it.
Notice that you're requesting this:
https://example.com/api/users
But the error says this:
Cannot GET /users
So the /api prefix is being stripped off the request path before being passed to your Node server.
Which is done by this line:
rewrite ^/api/?(.*) /$1 break;
Solution: remove that line.
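As a small illustration (a standalone sketch, not the poster's exact app) of why the stripped prefix produces that error: the router is mounted at /api, so Express only matches paths that still begin with /api:

// Minimal sketch: a router mounted under /api only matches /api/... paths
const express = require('express');
const app = express();
const router = express.Router();

router.get('/users', (req, res) => res.json([{ id: 1 }]));

app.use('/api', router);

app.listen(3000);
// GET /api/users  -> 200, returns the JSON above
// GET /users      -> 404 "Cannot GET /users" (what the nginx rewrite was producing)

Once the rewrite line is removed, nginx forwards /api/users unchanged and the mounted router matches it.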
I've got an Nginx server with the following config and a Node.js server.
server.js
const express = require('express'),
    cookieSession = require('cookie-session'),
    app = express();

app.use(cookieSession({
    secret: config.session_secret,
    resave: true,
    saveUninitialized: true,
    store: new Redis({
        port: config.redis_port
    }),
    cookie: { max_age: 43200000, domain: "localhost" }
}));
nginx.conf
worker_processes 1;
events {
    worker_connections 1024;
}
http {
    upstream app {
        server 127.0.0.1:3000;
    }
    server {
        listen 80;
        server_name localhost;
        client_max_body_size 32m;
        location / {
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header Host $http_host;
            proxy_set_header X-NginX-Proxy true;
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_pass http://app/;
            proxy_redirect off;
        }
    }
    server {
        listen 80;
        server_name sub.localhost;
        client_max_body_size 32m;
        location / {
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header Host $http_host;
            proxy_set_header X-NginX-Proxy true;
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_pass http://app/;
            proxy_redirect off;
        }
    }
}
I've tried adding domain: ".localhost" or even domain: "*.localhost", and I also tried adding
app.use(function(req, res, next) {
    // Website you wish to allow to connect
    res.setHeader('Access-Control-Allow-Origin', req.headers.host);
    // Request methods you wish to allow
    res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
    // Request headers you wish to allow
    res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,content-type');
    // Set to true if you need the website to include cookies in the requests sent
    // to the API (e.g. in case you use sessions)
    res.setHeader('Access-Control-Allow-Credentials', true);
    next();
});
to server.js.
The problem is: when I authenticate on localhost, I am not authenticated on sub.localhost.
From Login session across subdomains:
You can use: domain: ".app.localhost" and it will work. The 'domain' parameter needs 1 or more dots in the domain name for setting cookies.
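As a rough sketch of what that looks like with cookie-session (the domain value below is the dotted form from the quote; swap in the dotted parent of your own domains, and note that cookie-session takes maxAge and domain directly in its options object):

// Sketch: cookie-session with a dotted domain so the cookie is shared across subdomains
const express = require('express');
const cookieSession = require('cookie-session');

const app = express();

app.use(cookieSession({
    name: 'session',
    keys: ['some-long-random-secret'],   // placeholder secret
    maxAge: 43200000,                    // 12 hours, same value as in the question
    domain: '.app.localhost'             // the leading dot lets subdomains read the cookie
}));

app.get('/', (req, res) => {
    req.session.views = (req.session.views || 0) + 1;
    res.send('views: ' + req.session.views);
});

app.listen(3000);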
Hello, I have to configure multiple Node.js apps using nginx.
This is my current configuration:
upstream domain.com.ar {
    server 127.0.0.1:9000;
}
server {
    listen 80;
    server_name www.domain.com.ar;
    rewrite ^/(.*) http://domain.com.ar/$1 permanent;
}
server {
    listen 80;
    listen [::]:80 ipv6only=on;
    server_name domain.com.ar;
    #access_log /var/log/nginx/domain.com/access.log;
    location / {
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $http_host;
        proxy_set_header X-NginX-Proxy true;
        proxy_pass http://domain.com.ar;
        proxy_redirect off;
    }
}
When I use the IP 190.213.125.17:9000 (it's just an example of my IP), the app works fine,
but when I try to use domain.com.ar, nginx redirects to this page:
http://domain.com.ar/cgi-sys/defaultwebpage.cgi
so the app responds with the 404 error I configured in my app.js as a middleware:
app.use(function(req, res, next) {
    // Catch-all: send a 404 status and render the not-found page
    res.status(404).render('404.jade', {
        title: "404 - Page Not Found",
        showFullNav: false,
        status: 404,
        url: req.url
    });
});
Is there something wrong in the nginx config?