Webpack configuration for react-pdf in a Next.js project - javascript

I'm using the react-pdf library in Next.js to generate, view, and download a PDF in a static, client-side Next.js application (no server involved). But I can't set up Webpack for Next.js, as I don't have much knowledge about it.
This is the Webpack setup that react-pdf requires:
const webpack = require('webpack')

module.exports = {
  /* ... */
  resolve: {
    fallback: {
      process: require.resolve('process/browser'),
      zlib: require.resolve('browserify-zlib'),
      stream: require.resolve('stream-browserify'),
      util: require.resolve('util'),
      buffer: require.resolve('buffer'),
      assert: require.resolve('assert'),
    },
  },
  plugins: [
    new webpack.ProvidePlugin({
      Buffer: ['buffer', 'Buffer'],
      process: 'process/browser',
    }),
  ],
  /* ... */
}
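These fallback entries map Node core modules (zlib, stream, util, buffer, assert) to browser packages, which webpack 5 no longer does automatically, so those packages have to be installed in the project; the ProvidePlugin entries inject the Buffer and process globals that react-pdf's dependencies reference without importing them. A small illustration of what the ProvidePlugin part enables (not part of the required setup):

// With the ProvidePlugin entries above, dependency code that references the
// Buffer and process globals keeps working in the browser without importing them:
const encoded = Buffer.from('hello').toString('base64'); // "aGVsbG8="
process.nextTick(() => console.log(encoded));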
And this is the next.config.js:
module.exports = {
  webpack: (config, { buildId, dev, isServer, defaultLoaders, webpack }) => {
    // Important: return the modified config
    return config
  },
}

The config parameter that next.config.js gives us is the same kind of object we would export from a normal webpack.config.js. Try this setup in next.config.js:
module.exports = {
  webpack: (config, { buildId, dev, isServer, defaultLoaders, webpack }) => {
    config.resolve.fallback = {
      process: require.resolve("process/browser"),
      zlib: require.resolve("browserify-zlib"),
      stream: require.resolve("stream-browserify"),
      util: require.resolve("util"),
      buffer: require.resolve("buffer"),
      assert: require.resolve("assert"),
    };
    config.plugins.push(
      new webpack.ProvidePlugin({
        Buffer: ["buffer", "Buffer"],
        process: "process/browser",
      })
    );
    return config;
  },
};
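Since those Node shims are only needed in the browser bundle, a possible refinement (a sketch, not something react-pdf requires) is to apply them only when isServer is false and to merge rather than overwrite any fallbacks Next.js already set:

module.exports = {
  webpack: (config, { isServer, webpack }) => {
    if (!isServer) {
      // Only the client bundle needs browser replacements for Node core modules
      config.resolve.fallback = {
        ...config.resolve.fallback,
        process: require.resolve("process/browser"),
        zlib: require.resolve("browserify-zlib"),
        stream: require.resolve("stream-browserify"),
        util: require.resolve("util"),
        buffer: require.resolve("buffer"),
        assert: require.resolve("assert"),
      };
      config.plugins.push(
        new webpack.ProvidePlugin({
          Buffer: ["buffer", "Buffer"],
          process: "process/browser",
        })
      );
    }
    return config;
  },
};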

Try placing a setupProxy.js file in the src directory. Its content:
const { createProxyMiddleware } = require('http-proxy-middleware');

module.exports = function(app) {
  const options = {
    target: 'http://serverurl.com',
    changeOrigin: true,
    pathRewrite: function(path, req) {
      // console.log('path BEFORE trans: %o', path);
      const p = path.replace('/api', '');
      // console.log('current_path: %o', p);
      // if (path.indexOf('manage') !== -1) {
      //   p = '/web-module-backend' + p;
      // }
      // console.log('PATH: %o', p);
      return p;
    },
    onProxyRes: (proxyRes, req, res) => {
      // log original request and proxied request info
      const exchange = `[${req.method}] [${proxyRes.statusCode}] ${req.path} -> ${proxyRes.req.protocol}//${proxyRes.req.host}${proxyRes.req.path}`;
      // console.log(req.headers);
      // console.log(proxyRes.headers);
      console.log(exchange); // [GET] [200] / -> http://www.example.com
      console.log('Req URL: ' + req.originalUrl);
      console.log('Response status code: ' + proxyRes.statusCode);
      res.headers = proxyRes.headers;
    },
    onProxyReq: (proxyReq, req, res) => {
      Object.defineProperty(proxyReq, 'headers', {
        get() {
          return {
            host: 'http://urlserver.com',
            authorization: 'Authstring', // req.headers.authorization,
          };
        },
        set() {
        }
      });
      console.log(proxyReq.headers);
    },
    onError: (err, req, res, target) => {
      console.log(err);
    }
  };
  const uiProxy = createProxyMiddleware(options);
  app.use(
    '/api',
    uiProxy
  );
};
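Note that setupProxy.js is a create-react-app convention, so Next.js won't pick it up on its own. In a Next.js project, a comparable proxy can be declared with rewrites in next.config.js; a minimal sketch, with http://serverurl.com standing in for the real backend:

// next.config.js (sketch)
module.exports = {
  async rewrites() {
    return [
      {
        // Forward /api/* to the backend, dropping the /api prefix
        // (the same effect as the pathRewrite above)
        source: "/api/:path*",
        destination: "http://serverurl.com/:path*",
      },
    ];
  },
};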

Related

Problem setting a cookie and using a proxy in a Vite React app

In a Vite React TypeScript app, when I try to set a cookie and use a proxy, it doesn't work properly in production, but on localhost it works fine.
This is my code in vite.config.ts:
import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react'; // or '@vitejs/plugin-react-swc', whichever the project uses

const defaultConfig = {
  plugins: [
    react({ include: "**/*.tsx" }),
  ],
  server: {
    proxy: {
      '/api': {
        target: 'https://tame-lime-rooster-tie.cyclic.app',
        changeOrigin: true,
        configure: (proxy, options) => {
          proxy.on('proxyReq', (proxyReq, req, res) => {
            if (!req.headers.cookie) {
              return;
            }
            console.log('req.headers.cookie', req.headers.cookie);
            proxyReq.setHeader('cookie', req.headers.cookie);
          });
        },
        secure: false
      }
    }
  }
};
export default defineConfig(({ command, mode }) => {
  if (command === 'serve') {
    const isDev = mode === 'development';
    return {
      ...defaultConfig,
      server: {
        proxy: {
          '/api': {
            target: isDev ? 'http://localhost:5000' : 'https://tame-lime-rooster-tie.cyclic.app',
            changeOrigin: true,
            configure: (proxy, options) => {
              proxy.on('proxyReq', (proxyReq, req, res) => {
                if (!req.headers.cookie) {
                  return;
                }
                console.log('req.headers.cookie', req.headers.cookie);
                proxyReq.setHeader('cookie', req.headers.cookie);
              });
            },
            secure: false
          }
        }
      }
    };
  } else {
    return defaultConfig;
  }
});
But it only works well on localhost; when I deploy the app, it doesn't work properly.
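One thing worth noting: Vite's server.proxy only exists while the dev server is running, so a deployed production build (static files) has no '/api' proxy in front of it unless the hosting platform provides one. A minimal sketch of pointing the client at the backend directly in production (VITE_API_URL and the /users endpoint are hypothetical names used only for illustration):

// api.js (sketch)
// In dev, '/api' is handled by the dev-server proxy configured above;
// in production there is no dev server, so we call the backend directly.
const baseUrl = import.meta.env.DEV
  ? '/api'
  : import.meta.env.VITE_API_URL; // hypothetical env var, e.g. the cyclic.app URL

export async function getUsers() {
  // credentials: 'include' is needed for cookies to be sent cross-origin
  const res = await fetch(`${baseUrl}/users`, { credentials: 'include' });
  return res.json();
}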

Can't resolve 'path' in 'path' using description file: samplepath/package.json - Field 'browser' doesn't contain a valid alias configuration

I'm trying to run npm run build on files I've merged for a website. Unfortunately, I always get this error. I think it has something to do with the paths, but I'm not good enough at JS/React to see where the error is.
The important part of index.html:
<body>
  <div id="root"></div>
  <script src="../src/index.js" type="text/jsx"></script>
</body>
Index.jsx
import ReactDOM from "react-dom";
import React from "react";
import App from "./components/App.jsx";
const init = async () => {};
ReactDOM.render(<App />, document.getElementById("root"));
init();
webpack.config.js
const path = require("path");
const webpack = require("webpack");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const TerserPlugin = require("terser-webpack-plugin");
function initCanisterEnv() {
let localCanisters, prodCanisters;
try {
localCanisters = require(path.resolve(
".dfx",
"local",
"canister_ids.json"
));
} catch (error) {
console.log("No local canister_ids.json found. Continuing production");
}
try {
prodCanisters = require(path.resolve("canister_ids.json"));
} catch (error) {
console.log("No production canister_ids.json found. Continuing with local");
}
const network =
process.env.DFX_NETWORK ||
(process.env.NODE_ENV === "production" ? "ic" : "local");
const canisterConfig = network === "local" ? localCanisters : prodCanisters;
return Object.entries(canisterConfig).reduce((prev, current) => {
const [canisterName, canisterDetails] = current;
prev[canisterName.toUpperCase() + "_CANISTER_ID"] =
canisterDetails[network];
return prev;
}, {});
}
const canisterEnvVariables = initCanisterEnv();
const isDevelopment = process.env.NODE_ENV !== "production";
const frontendDirectory = "websitetwo_frontend";
const frontend_entry = path.join("src", frontendDirectory, "src", "index.html");
module.exports = {
target: "web",
mode: isDevelopment ? "development" : "production",
entry: {
index: path.join(__dirname, frontend_entry).replace(/\.html$/, ".js"),
},
devtool: isDevelopment ? "source-map" : false,
optimization: {
minimize: !isDevelopment,
minimizer: [new TerserPlugin()],
},
resolve: {
extensions: [".js", ".ts", ".jsx", ".tsx"],
fallback: {
assert: require.resolve("assert/"),
buffer: require.resolve("buffer/"),
events: require.resolve("events/"),
stream: require.resolve("stream-browserify/"),
util: require.resolve("util/"),
},
},
output: {
filename: "index.js",
path: path.join(__dirname, "dist", frontendDirectory),
},
plugins: [
new HtmlWebpackPlugin({
template: path.join(__dirname, frontend_entry),
cache: false,
}),
new webpack.EnvironmentPlugin({
NODE_ENV: "development",
...canisterEnvVariables,
}),
new webpack.ProvidePlugin({
Buffer: [require.resolve("buffer/"), "Buffer"],
process: require.resolve("process/browser"),
}),
],
// proxy /api to port 8000 during development
devServer: {
proxy: {
"/api": {
target: "http://127.0.0.1:8000",
changeOrigin: true,
pathRewrite: {
"^/api": "/api",
},
},
},
static: path.resolve(__dirname, "src", frontendDirectory, "assets"),
hot: true,
watchFiles: [path.resolve(__dirname, "src", frontendDirectory)],
liveReload: true,
},
};
Error
I have already searched for the error, but that didn't solve it. What is wrong? Thank you for the help!
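For what it's worth, "Can't resolve 'path'" usually means something in the browser bundle imports Node's path module, which webpack 5 no longer polyfills automatically. One common workaround is to extend the existing fallback block, assuming the path-browserify package is installed (a sketch, not a confirmed fix for this exact project):

// webpack.config.js (sketch): add a browser replacement for Node's path module
resolve: {
  extensions: [".js", ".ts", ".jsx", ".tsx"],
  fallback: {
    assert: require.resolve("assert/"),
    buffer: require.resolve("buffer/"),
    events: require.resolve("events/"),
    path: require.resolve("path-browserify"), // assumes path-browserify is installed
    stream: require.resolve("stream-browserify/"),
    util: require.resolve("util/"),
  },
},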

How to get pretty URLs in hapi.js

I have my server.js file working. At localhost:8080 it serves whatever file I give it from the corresponding URL, e.g. http://localhost:8080/about.html, as long as the file exists in public/pages. I'm wondering if I can somehow set a wildcard to leave off the extension for all HTML files in the URL, so that I don't have to individually specify each file as an alias in the routes like ['about', 'about.html'].
Here is my working code:
'use strict';

const Path = require('path');
const Hapi = require('hapi');

const server = new Hapi.Server();
server.connection({
  port: Number(process.argv[2] || 8080),
  host: 'localhost'
});

server.register(require('inert'), (err) => {
  if (err) {
    throw err;
  }

  server.route({
    method: 'GET',
    path: '/{param*}',
    handler: {
      directory: {
        path: 'public/pages',
        listing: true
      }
    },
    config: {
      state: {
        parse: false, // parse and store in request.state
        failAction: 'ignore' // may also be 'ignore' or 'log'
      }
    }
  });

  server.start((err) => {
    if (err) {
      throw err;
    }
    console.log('Server running at:', server.info.uri);
  });
});
Any help is greatly appreciated, thank you.
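If I'm reading the inert documentation correctly, its directory handler has a defaultExtension option that appends an extension when the requested path isn't found, which would give extension-less URLs without listing each alias by hand. A sketch against the route above:

server.route({
  method: 'GET',
  path: '/{param*}',
  handler: {
    directory: {
      path: 'public/pages',
      listing: true,
      defaultExtension: 'html' // a request for /about falls back to public/pages/about.html
    }
  }
});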

Vue.js 2 server-side rendering - not working

I've been working on making my Vue.js app work with SSR, but all of my attempts have failed. I really need help with this.
Please note that I'm using plain .js files, not .vue files, with ES6, and I pull in the HTML templates using webpack's require function.
The app works fine in development mode. However, when I run it through 'vue-server-renderer' and go to any route, this error is thrown:
Error: render function or template not defined in component: anonymous
at normalizeRender (/Users/salaahassi/dev/vue/magicum/node_modules/vue-server-renderer/build.js:6015:13)
at renderComponent (/Users/salaahassi/dev/vue/magicum/node_modules/vue-server-renderer/build.js:6081:3)
at renderNode (/Users/salaahassi/dev/vue/magicum/node_modules/vue-server-renderer/build.js:6065:7)
at render (/Users/salaahassi/dev/vue/magicum/node_modules/vue-server-renderer/build.js:6257:5)
at RenderStream.render (/Users/salaahassi/dev/vue/magicum/node_modules/vue-server-renderer/build.js:6312:9)
at RenderStream.tryRender (/Users/salaahassi/dev/vue/magicum/node_modules/vue-server-renderer/build.js:96:12)
at RenderStream._read (/Users/salaahassi/dev/vue/magicum/node_modules/vue-server-renderer/build.js:125:12)
at RenderStream.Readable.read (_stream_readable.js:348:10)
at resume_ (_stream_readable.js:737:12)
at _combinedTickCallback (internal/process/next_tick.js:74:11)
Also, when I disable JavaScript in my browser, even the home page disappears (which of course means it isn't being rendered by the SSR).
Here is my webpack config:
var path = require('path')
var webpack = require('webpack')
var HTMLPlugin = require('html-webpack-plugin');
var CopyWebpackPlugin = require('copy-webpack-plugin');
var ExtractTextPlugin = require("extract-text-webpack-plugin");
var extractCSS = new ExtractTextPlugin('styles.css');
var options = {
// entry: './entry.client.js',
entry: {
app: './entry.client.js',
vendor: [
'vue',
'vue-router',
'vuex',
'vuex-router-sync',
'moment',
'axios'
]
},
output: {
path: path.resolve(__dirname, './dist'),
publicPath: '/',
filename: '[name].[hash].js',
},
module: {
noParse: /es6-promise\.js$/, // avoid webpack shimming process
rules: [
{
test: /\.html$/,
loader: 'raw-loader'
},
{
test: /\.js$/,
loader: 'babel-loader',
exclude: /node_modules/
},
{
test: /\.json$/,
loader: 'json-loader'
},
{
test: /\.(png|jpg|gif|svg|woff|woff2|eot|ttf)$/,
loader: 'file-loader',
options: {
name: '[name].[ext]?[hash]'
}
},
{
test: /\.scss$/,
loader: extractCSS.extract('css-loader!sass-loader')
}
]
},
plugins: [
extractCSS,
new webpack.ContextReplacementPlugin(/moment[\\\/]locale$/, /^\.\/(en|zh-tw)$/),
new webpack.DefinePlugin({
'process.env': {
'NODE_ENV': JSON.stringify(process.env.NODE_ENV) || 'development',
'VUE_ENV': JSON.stringify(process.env.VUE_ENV) || 'client',
}
})
],
resolve: {
alias: {
'vue$': 'vue/dist/vue'
}
},
devServer: {
historyApiFallback: true,
noInfo: true
},
devtool: '#eval-source-map'
}
console.log("xxxxx ---node env---- xxxx", process.env.NODE_ENV);
console.log("xxxxx ---vue env---- xxxx", process.env.VUE_ENV);
if (process.env.NODE_ENV != 'development') {
options.entry = './entry.server.js';
options.target = 'node';
options.output.filename = 'bundle-server.js';
options.output.libraryTarget = 'commonjs2';
options.externals = Object.keys(require('./package.json').dependencies);
}
if (process.env.NODE_ENV == 'development') {
options.plugins = (options.plugins || []).concat([
new HTMLPlugin({
template: './index.html'
}),
// extract vendor chunks for better caching
new webpack.optimize.CommonsChunkPlugin({
name: 'vendor'
})
]);
}
if (process.env.VUE_ENV == 'server') {
options.devtool = '#source-map'
options.plugins = (options.plugins || []).concat([
new webpack.optimize.UglifyJsPlugin({
//sourceMap: true,
compress: {
warnings: false
}
}),
new webpack.LoaderOptionsPlugin({
minimize: true
}),
new CopyWebpackPlugin([
{from: './assets', to: 'assets'},
{from: './index.html'}
])
])
}
module.exports = options;
And here is my server entry file:
import { app, router, store } from './src/app'

export default context => {
  // set router's location
  router.push(context.url)

  // call prefetch hooks on components matched by the route
  const s = Date.now()
  return Promise.all(router.getMatchedComponents().map(component => {
    if (component.prefetch) {
      return component.prefetch(store)
    }
  })).then(() => {
    console.log(`data pre-fetch: ${Date.now() - s}ms`)
    // set initial store on context
    // the request handler will inline the state in the HTML response.
    context.initialState = store.state
    return app
  })
}
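For context, this entry expects each matched route component to optionally expose a prefetch(store) function that returns a promise. A hypothetical component following that convention (the file names and action name are made up for illustration):

// components/home/index.js (hypothetical)
module.exports = {
  template: require('./home.html'), // raw-loader returns the HTML string
  prefetch: function (store) {
    // must return a promise so the server waits for the data before rendering
    return store.dispatch('FETCH_HOME_DATA')
  }
}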
Here is my server.js:
'use strict'
const fs = require('fs')
const path = require('path')
const resolve = file => path.resolve(__dirname, file)
const express = require('express')
// const favicon = require('serve-favicon')
const serialize = require('serialize-javascript')
const createBundleRenderer = require('vue-server-renderer').createBundleRenderer
const app = express()
// parse index.html template
const template = fs.readFileSync(resolve('./dist/index.html'), 'utf-8')
// create server renderer from real fs
const bundlePath = resolve('./dist/bundle-server.js')
let renderer = createRenderer(fs.readFileSync(bundlePath, 'utf-8'))
console.log(renderer);
function createRenderer (bundle) {
return createBundleRenderer(bundle, {
cache: require('lru-cache')({
max: 1000,
maxAge: 1000 * 60 * 15
})
})
}
var options = {
maxAge: '60d',
setHeaders: function(res, path, stat) {
// Webfonts need to have CORS * set in order to work.
if (path.match(/ttf|woff|woff2|eot|svg/ig)) {
res.set('Access-Control-Allow-Origin', '*');
}
}
};
var dist_path = '/dist/';
app.use(express.static(path.join(__dirname, dist_path), options));
console.log("............");
app.get('*', (req, res) => {
console.log(".....ROUTE.......", req.url);
console.log('renderer', renderer);
if (!renderer) {
return res.end('waiting for compilation... refresh in a moment.')
}
var s = Date.now()
const context = { url: req.url }
const renderStream = renderer.renderToStream(context)
let firstChunk = true
// console.log(html.head);
// res.write(html.head)
renderStream.on('data', chunk => {
if (firstChunk) {
// embed initial store state
if (context.initialState) {
res.write(
`<script>window.__INITIAL_STATE__=${
serialize(context.initialState, { isJSON: true })
}</script>`
)
}
firstChunk = false
}
res.write(chunk)
})
renderStream.on('end', () => {
res.end(template)
console.log(`whole request: ${Date.now() - s}ms`)
})
renderStream.on('error', err => {
throw err
})
})
const port = process.env.PORT || 3000
app.listen(port, () => {
console.log(`server started at http://localhost:${port}`)
})
Does your index.html template have the placeholder <!--vue-ssr-outlet-->?
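For reference, and only as a sketch rather than a fix for the error above: newer versions of vue-server-renderer (2.2+) accept a template option on createBundleRenderer, look for <!--vue-ssr-outlet--> in that HTML, and inject the rendered app there, so the route handler no longer has to stitch the markup together by hand:

// sketch: letting vue-server-renderer fill in the template itself
const renderer = createBundleRenderer(fs.readFileSync(bundlePath, 'utf-8'), {
  template: template // ./dist/index.html, which must contain <!--vue-ssr-outlet-->
})

app.get('*', (req, res) => {
  renderer.renderToString({ url: req.url }, (err, html) => {
    if (err) { return res.status(500).end('Internal Server Error') }
    res.end(html) // the full page with the app injected at the outlet comment
  })
})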

Gulpfile with BrowserSync and nodemon uses too much RAM

My gulpfile is not functioning how I'd like. When I run the default task with gulp, my browser is launched but hangs at "waiting for localhost..." in the lower-left corner. If I refresh, the server works as expected. I'd also like to edit my code and see the updates in my browser. That feature works, but gulp grows to ~300 MB of RAM after developing for a while.
'use strict';
process.env.DEBUG = process.env.DEBUG || 'r3dm:*';
var gulp = require('gulp'),
// ## Style
concat = require('gulp-concat'),
stylus = require('gulp-stylus'),
swiss = require('kouto-swiss'),
mincss = require('gulp-minify-css'),
// ## Bundle
browserify = require('browserify'),
watchify = require('watchify'),
envify = require('envify/custom')({ NODE_ENV: 'development' }),
uglifyify = require('uglifyify'),
bundleName = require('vinyl-source-stream'),
//brfs = require('brfs'),
// ## utils
plumber = require('gulp-plumber'),
util = require('gulp-util'),
noopPipe = util.noop,
//logPipe = util.log,
watch = require('gulp-watch'),
yargs = require('yargs').argv,
debug = require('debug')('r3dm:gulp'),
// ## min
imagemin = require('gulp-imagemin'),
//pngcrush = require('imagemin-pngcrush'),
// ## Serve/Proxy/Reload
nodemon = require('gulp-nodemon'),
sync = require('browser-sync'),
reload = sync.reload,
// ## React
react = require('gulp-react'),
// ## production?
production = yargs.p;
var paths = {
main: './client.js',
jsx: './components/**/**.jsx',
stylusMain: './components/app.styl',
stylusAll: './components/**/*.styl',
css: './public/css/',
server: './server.js',
serverIgnore: [
'gulpfile.js',
'public/',
'components/**/*.styl',
'bower_components/',
'node_modules/'
],
publicJs: './public/js'
};
var watching = false;
var reloadDelay = 6500;
if (production) {
// ## Set with `-p`
console.log('\n', 'Production mode set', '\n');
}
gulp.task('stylus', function() {
return gulp.src(paths.stylusMain)
.pipe(plumber())
.pipe(stylus({
use: [
swiss()
],
'include css': true
}))
.pipe(concat('main.css'))
.pipe(production ? mincss() : noopPipe())
.pipe(gulp.dest(paths.css));
});
gulp.task('jsx', function() {
return gulp.src('./components/**/*.jsx')
.pipe(react())
.pipe(gulp.dest('./components'));
});
gulp.task('jsx-watch', function() {
return gulp.src(paths.jsx)
.pipe(watch(paths.jsx))
.pipe(react({
harmony: true
}))
.pipe(gulp.dest('./components'));
});
gulp.task('bundle', function(cb) {
browserifyCommon(cb);
});
gulp.task('sync', ['bundle', 'stylus', 'server'], function() {
sync.init(null, {
proxy: 'http://localhost:9000',
logLevel: 'debug',
files: [
'public/**/*.*',
'!public/js/bundle.js'
],
port: 9002,
open: true,
reloadDelay: reloadDelay
});
});
gulp.task('server', function(cb) {
var called = false;
nodemon({
script: paths.server,
ext: '.js',
ignore: paths.serverIgnore,
env: {
'NODE_ENV': 'development',
'DEBUG': 'r3dm:*'
}
})
.on('start', function() {
if (!called) {
called = true;
setTimeout(function() {
cb();
}, reloadDelay);
}
})
.on('restart', function(files) {
if (files) {
debug('Files that changed: ', files);
}
setTimeout(function() {
debug('Restarting browsers');
reload();
}, reloadDelay);
});
});
gulp.task('watch', function() {
gulp.watch(paths.stylusAll, ['stylus']);
});
gulp.task('setWatch', function() {
watching = true;
});
gulp.task('image', function() {
gulp.src('images/**/*')
.pipe(imagemin({
progressive: true,
optimizationLevel: 2
}))
.pipe(gulp.dest('public/images'));
});
gulp.task('default', [
'setWatch',
'jsx-watch',
'bundle',
'stylus',
'server',
'sync',
'watch'
]);
function browserifyCommon(cb) {
cb = cb || noop;
var config;
if (watching) {
config = {
basedir: __dirname,
debug: true,
cache: {},
packageCache: {}
};
} else {
config = {
basedir: __dirname
};
}
var b = browserify(config);
b.transform(envify);
//b.transform(brfs);
if (!production) {
debug('Watching');
b = watchify(b);
b.on('update', function() {
bundleItUp(b);
});
}
if (production) {
debug('Uglifying bundle');
b.transform({ global: true }, uglifyify);
}
b.add(paths.main);
bundleItUp(b);
cb();
}
function bundleItUp(b) {
debug('Bundling');
return b.bundle()
.pipe(plumber())
.pipe(bundleName('bundle.js'))
.pipe(gulp.dest(paths.publicJs));
}
function noop() { }
Update: it may not be my gulpfile. I waited patiently and it eventually loaded my app's root page. Here's my server.js file:
'use strict';
require('dotenv').load();
require('newrelic');
var express = require('express'),
app = express(),
keystone = require('keystone'),
mongoose = require('mongoose'),
// ## Util
debug = require('debug')('r3dm:server'),
utils = require('./utils/utils'),
// ## React
React = require('react'),
Router = require('./components/Router'),
state = require('express-state'),
// ## Flux
Fetcher = require('fetchr'),
mandrillServ = require('./services/mandrill'),
blogServ = require('./services/blog'),
ContextStore = require('./components/common/Context.store'),
RouterStateAction = require('./components/common/RouterState.action'),
// ## Express/Serve
morgan = require('morgan'),
serve = require('serve-static'),
favicon = require('serve-favicon'),
body = require('body-parser'),
multer = require('multer'),
compress = require('compression'),
cookieParser = require('cookie-parser'),
session = require('express-session'),
flash = require('connect-flash'),
helmet = require('helmet');
// ## State becomes a variable available to all rendered views
state.extend(app);
app.set('state namespace', 'R3DM');
app.set('port', process.env.PORT || 9000);
app.set('view engine', 'jade');
app.use(helmet());
app.use(morgan('dev'));
app.use(favicon(__dirname + '/public/images/favicon.ico'));
app.use(cookieParser('12345'));
app.use(body.urlencoded({ extended: false }));
app.use(body.json());
app.use(multer());
app.use(compress());
app.use(flash());
app.use(session({
secret: 'keyboard cat',
resave: false,
saveUninitialized: true
}));
// ## Fetcher middleware
Fetcher.registerFetcher(mandrillServ);
Fetcher.registerFetcher(blogServ);
app.use('/api', Fetcher.middleware());
keystone.app = app;
keystone.mongoose = mongoose;
keystone.init({
'cookie secret': '12345',
'auth': true,
'user model': 'User',
'mongo': process.env.MONGO_URI,
'session': true
});
keystone.import('models');
keystone.static(app);
keystone.routes(app);
keystone.mongoose.connect(keystone.get('mongo'));
app.use(serve('./public'));
app.get('/500', function(req, res) {
res.render('500');
});
app.get('/emails/:name', function(req, res) {
var locals = {},
name = req.params.name,
nameArr;
nameArr = name
.split(' ')
.map(function(_name) {
_name = _name.replace(/[^A-Za-z_'-]/gi, '');
_name = utils.capitalize(_name);
return _name;
});
locals.name = nameArr[0];
res.render('email/greet', locals);
});
app.get('/*', function(req, res, next) {
debug('req', req.path);
debug('decode req', decodeURI(req.path));
Router(decodeURI(req.path))
.run(function(Handler, state) {
Handler = React.createFactory(Handler);
debug('Route found, %s ', state.path);
var ctx = {
req: req,
res: res,
next: next,
Handler: Handler,
state: state
};
debug('Sending route action');
RouterStateAction(ctx);
});
});
// Use a hot observable stream for requests
var hotObservable = ContextStore.publish();
// Run on next sequence
hotObservable.subscribe(function(ctx) {
if (!ctx.Handler) { return debug('no handler'); }
debug('rendering react to string', ctx.state.path);
var html = React.renderToString(ctx.Handler());
debug('rendering jade');
ctx.res.render('layout', { html: html }, function(err, markup) {
if (err) { return ctx.next(err); }
debug('Sending %s', ctx.state.path);
return ctx.res.send(markup);
});
});
// Start listening listening to observable sequence;
hotObservable.connect();
app.use(function(req, res) {
res.status(404);
res.render(404);
});
app.use(function(err, req, res, next) { //jshint ignore:line
debug('Err: ', err);
res
.status(500)
.send('Something went wrong');
});
// keystone.start();
app.listen(app.get('port'), function() {
debug('The R3DM is go at: ' + app.get('port'));
debug(new Date());
});
The line debug('req', req.path); is eventually reached after ~120 seconds, but if I refresh when the tab is first opened, it loads immediately.
Update 2: I was able to fix the initial loading issue by increasing the delay to 6500 ms. Now I need to find what's causing the memory leak.
