problems with vue-cli for production - javascript

I am currently working on a vue-cli app, but I am having problems running the app in production locally. After finishing everything in the dev environment I used the following command to build the production version of the app.
npm run build
the following is my config/prod.env.js file:
'use strict'
module.exports = {
NODE_ENV: '"production"'
}
config/index.js file:
'use strict'
// Template version: 1.3.1
// see http://vuejs-templates.github.io/webpack for documentation.
const path = require('path')
module.exports = {
  dev: {
    // Paths
    assetsSubDirectory: 'static',
    assetsPublicPath: '/',
    proxyTable: {},
    // Various Dev Server settings
    host: 'localhost', // can be overwritten by process.env.HOST
    port: 8080, // can be overwritten by process.env.PORT, if port is in use, a free one will be determined
    autoOpenBrowser: false,
    errorOverlay: true,
    notifyOnErrors: true,
    poll: false, // https://webpack.js.org/configuration/dev-server/#devserver-watchoptions-
    /**
     * Source Maps
     */
    // https://webpack.js.org/configuration/devtool/#development
    devtool: 'cheap-module-eval-source-map',
    // If you have problems debugging vue-files in devtools,
    // set this to false - it *may* help
    // https://vue-loader.vuejs.org/en/options.html#cachebusting
    cacheBusting: true,
    cssSourceMap: true
  },
  build: {
    // Template for index.html
    index: path.resolve(__dirname, '../dist/index.html'),
    // Paths
    assetsRoot: path.resolve(__dirname, '../dist'),
    assetsSubDirectory: 'static',
    assetsPublicPath: '/',
    /**
     * Source Maps
     */
    productionSourceMap: true,
    // https://webpack.js.org/configuration/devtool/#production
    devtool: '#source-map',
    // Gzip off by default as many popular static hosts such as
    // Surge or Netlify already gzip all static assets for you.
    // Before setting to `true`, make sure to:
    // npm install --save-dev compression-webpack-plugin
    productionGzip: false,
    productionGzipExtensions: ['js', 'css'],
    // Run the build command with an extra argument to
    // View the bundle analyzer report after build finishes:
    // `npm run build --report`
    // Set to `true` or `false` to always turn it on or off
    bundleAnalyzerReport: process.env.npm_config_report
  }
}
config/dev.env.js:
'use strict'
const merge = require('webpack-merge')
const prodEnv = require('./prod.env')
module.exports = merge(prodEnv, {
NODE_ENV: '"development"'
})
build/build.js:
'use strict'
require('./check-versions')()
process.env.NODE_ENV = 'production'
const ora = require('ora')
const rm = require('rimraf')
const path = require('path')
const chalk = require('chalk')
const webpack = require('webpack')
const config = require('../config')
const webpackConfig = require('./webpack.prod.conf')
const spinner = ora('building for production...')
spinner.start()
rm(path.join(config.build.assetsRoot, config.build.assetsSubDirectory), err => {
  if (err) throw err
  webpack(webpackConfig, (err, stats) => {
    spinner.stop()
    if (err) throw err
    process.stdout.write(stats.toString({
      colors: true,
      modules: false,
      children: false, // if you are using ts-loader, setting this to true will make typescript errors show up during build
      chunks: false,
      chunkModules: false
    }) + '\n\n')
    if (stats.hasErrors()) {
      console.log(chalk.red(' Build failed with errors.\n'))
      process.exit(1)
    }
    console.log(chalk.cyan(' Build complete.\n'))
    console.log(chalk.yellow(
      ' Tip: built files are meant to be served over an HTTP server.\n' +
      ' Opening index.html over file:// won\'t work.\n'
    ))
  })
})
It created a dist folder, in which I initialised a package.json and installed Express:
package.json:
{
  "name": "project_gorilla_production",
  "version": "0.0.0",
  "description": "The production version for project gorilla",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [
    "ProjectGorilla",
    "BkChatLDN",
    "zezemills"
  ],
  "author": "Christopher Salay",
  "license": "ISC",
  "dependencies": {
    "express": "^4.16.4"
  }
}
my server.js file:
const express = require('express');
const app = express();
const path = require('path');
app.get('/', function(req, res) {
  res.sendFile(path.join(__dirname, 'index.html'));
});
app.listen(process.env.PORT || 8000, function(){
  console.log('Your node js server is running');
})
index.html file:
<!DOCTYPE html><html><head><meta charset=utf-8><meta name=viewport content="width=device-width,initial-scale=1"><title>project_gorilla</title><script src=https://cdnjs.cloudflare.com/ajax/libs/socket.io/2.1.1/socket.io.js></script><link href=/static/css/app.b2785d7282208bedd7a467d4d7584204.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=/static/js/manifest.2ae2e69a05c33dfc65f8.js></script><script type=text/javascript src=/static/js/vendor.91dc1c7d90da0f3312fd.js></script><script type=text/javascript src=/static/js/app.75194bcb7c3977e313be.js></script></body></html>
I get the following error when I run node server.js:
Refused to apply style from 'http://localhost:8000/static/css/app.b2785d7282208bedd7a467d4d7584204.css' because its MIME type ('text/html') is not a supported stylesheet MIME type, and strict MIME checking is enabled.
and many of my files, such as the manifest, vendor and app JS files, give a 404 error.
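For context, the server above only defines a route for '/', so every request for /static/... falls through to Express's default 404 handler, and its HTML error body is what triggers the MIME warning. A minimal sketch of serving the whole build output statically, assuming server.js sits inside dist next to index.html and static/:
const express = require('express');
const path = require('path');
const app = express();
// serve index.html and everything under static/ with the correct MIME types
app.use(express.static(__dirname));
app.get('/', function(req, res) {
  res.sendFile(path.join(__dirname, 'index.html'));
});
app.listen(process.env.PORT || 8000, function() {
  console.log('Your node js server is running');
});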

Related

npm build to two dist folder

I have a Gatsbyjs project and I'm trying to build into 2 different folders: public and production.
The public folder will be used for development purposes.
I have tried npm build ./production, but it still builds into the public folder.
Although it's not a common use case, you can achieve this by changing your gatsby-node.js via the Gatsby API (onPreInit and onPostBuild):
const path = require("path")
const fs = require("fs")
exports.onPreInit = () => {
  // only shuffle folders for `gatsby build`, not `gatsby develop`
  if (process.argv[2] === "build") {
    // clear any previous production output
    fs.rmdirSync(path.join(__dirname, "dist"), { recursive: true })
    // park the development output out of the way
    fs.renameSync(
      path.join(__dirname, "public"),
      path.join(__dirname, "public_dev")
    )
  }
}
exports.onPostBuild = () => {
  // the freshly built site lands in `public`; move it to `dist`
  fs.renameSync(
    path.join(__dirname, "public"),
    path.join(__dirname, "dist")
  )
  // restore the development output back to `public`
  fs.renameSync(
    path.join(__dirname, "public_dev"),
    path.join(__dirname, "public")
  )
}
In the example above, the output ends up in /dist when running the gatsby build command.
Useful GitHub issues:
https://github.com/gatsbyjs/gatsby/issues/18975
https://github.com/gatsbyjs/gatsby/issues/14703

Knex required configuration option 'client' is missing error

Here are my files.
knexfile.js
require('dotenv').config();
module.exports = {
  development: {
    client: process.env.DB_CLIENT,
    connection: {
      host: process.env.DB_HOST,
      user: process.env.DB_USER,
      password: process.env.DB_PASSWORD,
      database: process.env.DB_NAME
    },
    migrations: {
      directory: __dirname + '/db/migrations'
    },
    seeds: {
      directory: __dirname + '/db/seeds'
    }
  }
};
knex.js
const environment = process.env.NODE_ENV || 'development';
let config = require('../knexfile')[environment];
module.exports = require('knex')(config);
index.js
require('babel-register');
import express from 'express';
const port = process.env.PORT || 5000;
const app = express();
app.listen(port, () => {
  console.log('Server running on port:', port); // eslint-disable-line
});
export default app;
Now when I run the following command:
knex migrate:make create_employee_and_company_tables
It gives the following error:
Error: knex: Required configuration option 'client' is missing.
at new Client (/Users/sujin.v2px/NodeJS/nodees6/node_modules/knex/lib/client.js:99:11)
at Knex (/Users/sujin.v2px/NodeJS/nodees6/node_modules/knex/lib/index.js:56:34)
at initKnex (/usr/local/lib/node_modules/knex/bin/cli.js:73:10)
at Command.<anonymous> (/usr/local/lib/node_modules/knex/bin/cli.js:139:22)
at Command.listener (/usr/local/lib/node_modules/knex/node_modules/commander/index.js:315:8)
at emitTwo (events.js:126:13)
at Command.emit (events.js:214:7)
...
Am I missing some configuration? What does the missing 'client' actually refer to?
This answer may be helpful for people who land here with the same issue while using TypeScript (beyond the dotenv issue; check the other answers for that).
'client' is missing error and Typescript
The problem is that a TypeScript export default is not supported by the knex CLI by default.
To illustrate: exporting the knexfile config with export default throws the error above, while exporting it with module.exports works, as in the sketch below.
As you can see, you can use TypeScript normally, even the import syntax and all; only the export itself needs to use the CommonJS syntax directly.
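A rough sketch of the contrast, assuming ts-node is installed and DATABASE_URL is just a placeholder connection value:
// knexfile.ts - sketch only
import * as dotenv from 'dotenv';
dotenv.config();

const config = {
  development: {
    client: 'pg',
    connection: process.env.DATABASE_URL
  }
};

// export default config;  // NOT picked up by the knex CLI -> "'client' is missing"
module.exports = config;    // CommonJS-style export: the CLI finds the config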
If that doesn't help, you can check this GitHub issue for solutions:
https://github.com/tgriesser/knex/issues/1232
I don't know how knex resolves tsconfig.json; it may matter, and you may need to add a new tsconfig.json where knexfile.ts is located.
In my case I had the following in my config (it was in my project root and not where knexfile.ts is, as it is there for the project compilation):
"compilerOptions": {
/* Basic Options */
// "incremental": true, /* Enable incremental compilation */
"target": "ES2018", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
// "lib": [], /* Specify library files to be included in the compilation. */
"allowJs": true,
You may want to change the target.
Another important point: you must have ts-node installed, as it is used under the hood (if you don't, you may get a completely different error). And don't forget to install your clients: npm i --save pg sqlite3 ts-node typescript knex (you may want to separate the dev dependencies).
I will update this after more investigation, to explain the why in more depth.
In order to use environment variables from your .env file, pass a path argument to config like this:
require('dotenv').config({path: 'path-to-.env'})
https://github.com/tgriesser/knex/issues/590
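For example, a short sketch that resolves the .env sitting next to the knexfile, so the CLI finds it regardless of the directory it is invoked from (the file layout is an assumption):
// at the top of knexfile.js
const path = require('path');
require('dotenv').config({ path: path.resolve(__dirname, '.env') });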
What solved this problem for me: in my knexfile I was using a non-standard environment name:
let dbConnection = {
  client: "pg",
  connection: connectionObject,
  migrations: {
    directory: './db/migrations'
  },
  useNullAsDefault: true
};
module.exports = {
  connection: dbConnection
};
So I had to run knex migrate:make --env connection migration_name and it worked as expected.
I advise placing the client immediately below module.exports:
module.exports = {
  client: 'postgresql',
  connection: {
    database: 'nomedobanco',
    user: 'user',
    password: 'senha'
  }
}
You have mentioned require('dotenv').config();
require('dotenv').config();
module.exports = {
  development: {
    client: process.env.DB_CLIENT,
    connection: {
      host: process.env.DB_HOST,
      user: process.env.DB_USER,
      password: process.env.DB_PASSWORD,
      database: process.env.DB_NAME
    },
    migrations: {
      directory: __dirname + '/db/migrations'
    },
    seeds: {
      directory: __dirname + '/db/seeds'
    }
  }
};
Please make sure you have a .env file in the root folder which contains your environment variables.
# Application
APP_PORT=3000
APP_HOST=127.0.0.1
# Environment
NODE_ENV = development
# Database
DB_CLIENT=mysql
DB_HOST=localhost
DB_USER=myuser
DB_PASSWORD=*******
DB_NAME=vts
DB_PORT=3308
Once you have the .env file in the root folder, you will see this error is gone. Also make sure you have specified a correct DB_CLIENT in your .env file.
Just another possibility here, since I didn't see anyone mention it yet:
If you are also using a knexfile and you are sure your client is set properly (e.g. "pg"), then make sure your environment variable matches the knexfile.
What I mean is: run echo $NODE_ENV to see what your NODE_ENV is.
In my case, mine was actually dev, not development (the default in the knexfile).
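A minimal sketch of one way around that, assuming the shell reports NODE_ENV=dev: expose the same settings under both names in the knexfile so the CLI finds them either way (the dev key is just an illustration):
// knexfile.js - sketch only
require('dotenv').config();
const development = {
  client: process.env.DB_CLIENT,
  connection: {
    host: process.env.DB_HOST,
    user: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    database: process.env.DB_NAME
  }
};
// alias the non-standard environment name to the same settings
module.exports = { development, dev: development };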
Your process.env.DB_CLIENT is undefined. You can verify it by hardcoding
client: 'pg',
without trying to use environment variables / dotenv.
If reading the configuration had failed completely and the configuration were undefined, a different error would have been thrown (cannot read 'client' of undefined).
I observed that knexfile.js does not pick up the env config without a path.
So use it as below:
require('dotenv').config({path: './'});
The simple answer to this error is that you are missing the client.
You just need to define all three environments: development, staging and production.
module.exports = {
  development: {
    client: "mysql",
    connection: {
      user: process.env.DB_USER,
      host: process.env.DB_HOST,
      password: process.env.DB_PASSWORD,
      database: process.env.DB_NAME
    },
    migrations: {
      directory: __dirname + '/db/migrations',
    },
    seeds: {
      directory: __dirname + '/db/seeds'
    }
  },
  staging: {
    client: "mysql",
    connection: {
      user: process.env.DB_USER,
      host: process.env.DB_HOST,
      password: process.env.DB_PASSWORD,
      database: process.env.DB_NAME
    },
    migrations: {
      directory: __dirname + '/db/migrations',
    },
    seeds: {
      directory: __dirname + '/db/seeds'
    }
  },
  production: {
    client: "mysql",
    connection: {
      user: process.env.DB_USER,
      host: process.env.DB_HOST,
      password: process.env.DB_PASSWORD,
      database: process.env.DB_NAME
    },
    migrations: {
      directory: __dirname + '/db/migrations',
    },
    seeds: {
      directory: __dirname + '/db/seeds'
    }
  }
};
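For completeness, once all environments are defined you can point the CLI at a specific one with the --env flag; a couple of illustrative commands (the migration name is only an example):
knex migrate:make create_employee_and_company_tables --env development
knex migrate:latest --env production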

dotenv not working with serverless/webpack

EDIT: If I log out dotenv.config() I get an error of: Error: ENOENT: no such file or directory, open '/Users/myPathToApplication/.webpack/test/.env'
I am bundling my serverless handler in order to use es6/es7 code. I have some env variables that I am trying to use as well. The problem is it seems that dotenv is not working when I bundle the handler.
For example, one of the utils I am using connects mongoose to my application. In here I store the DB_URI as an env variable.
import envdotjs from 'envdotjs';
import mongoose from 'mongoose';
mongoose.Promise = global.Promise;
require('dotenv').config();
let isConnected;
const connectToDatabase = () => {
  if (isConnected) {
    console.log('=> using existing database connection');
    return Promise.resolve();
  }
  console.log('=> using new database connection');
  return mongoose.connect(process.env.DB_URI).then(db => {
    isConnected = db.connections[0].readyState;
  });
};
module.exports = {
  connectToDatabase
};
However the DB_URI is undefined and the code breaks.
Here is my webpack config:
const slsw = require('serverless-webpack');
const nodeExternals = require('webpack-node-externals');
module.exports = {
  entry: slsw.lib.entries,
  target: 'node',
  devtool: 'source-map',
  externals: [nodeExternals()],
  mode: slsw.lib.webpack.isLocal ? 'development' : 'production',
  module: {
    rules: [
      {
        test: /\.js$/,
        loader: 'babel-loader',
        include: __dirname,
        exclude: /node_modules/
      }
    ]
  }
};
I am running this in order to use ES6/7 in the serverless handler, which works just fine, but the env variables are breaking. I also tried a module called envdotjs and got the same result (the env variables are undefined), so I don't think this is a problem with dotenv itself.
I found a package, dotenv-webpack, which was also recommended by @apokryfos. Just require it with const Dotenv = require('dotenv-webpack') and include it in webpack.config.js:
module.exports = {
  ...
  plugins: [new Dotenv()]
}
Just keep your .env in the root next to webpack.config.js and you can use your process.env variables anywhere you need to with no other configuration.
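For reference, a minimal sketch of how that could look merged into the webpack config from the question (no plugin options are required; adding the plugin is the only change):
const slsw = require('serverless-webpack');
const nodeExternals = require('webpack-node-externals');
const Dotenv = require('dotenv-webpack');
module.exports = {
  entry: slsw.lib.entries,
  target: 'node',
  devtool: 'source-map',
  externals: [nodeExternals()],
  mode: slsw.lib.webpack.isLocal ? 'development' : 'production',
  plugins: [
    // reads ./.env at build time and inlines the referenced process.env.* values
    new Dotenv()
  ],
  module: {
    rules: [
      {
        test: /\.js$/,
        loader: 'babel-loader',
        include: __dirname,
        exclude: /node_modules/
      }
    ]
  }
};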

How to create/generate/export a file from my webpack 2 config to be used inside of my React code?

I am passing a NODE_ENV variable into my webpack.config from package.json in order to return an object that contains API endpoints for either localhost or production.
1) package.json
"scripts": {
"dev": "NODE_ENV=development webpack-dev-server --history-api-fallback",
"prod": "NODE_ENV=production webpack -p",
"build": "NODE_ENV=production webpack -p"
}
2) endpoints.js
function endpoints(env) {
  let prefix = env === 'development' ? 'http://localhost' : '';
  return {
    "login": `${prefix}/app/api/login`
  }
}
module.exports = endpoints;
3) webpack.config
const webpack = require('webpack')
const HtmlWebpackPlugin = require("html-webpack-plugin");
const ExtractTextPlugin = require("extract-text-webpack-plugin");
const CopyWebpackPlugin = require("copy-webpack-plugin");
const path = require("path");
const dist = path.resolve(__dirname, "dist");
const src = path.resolve(__dirname, "src");
const endpoints = require("./src/endpoints");
const api = endpoints(process.env.NODE_ENV);
console.log('webpack api', api);
module.exports = {
  context: src,
  entry: [
    "./index.js"
  ],
  output: {
    path: dist,
    // ....
When webpack runs, I can see the console.log of the const api.
Now my question is: how do I generate or export an actual api file to be used inside my src/services/api file?
import axios from 'axios'
// import api from '../../webpack.config' <-- ?
// import api from '../../api.js <-- ?
const log = (method, err) => {
  console.error(`%c${method}`, 'background: #393939; color: #F25A43', err);
  return null;
};
export const userLogin = (username, password) => {
  const post_data = { username, password };
  return axios.post('http://localhost/app/api/login', post_data) // <-- api to be used here
    .then(res => res)
    .catch((err) => log('api.userLogin', err));
};
I think this is an XY problem. You could generate the file with a bit of Node (something like fs.writeFileSync('api.js', contents)), or you could do it with a bit of shell scripting, but you could also just use the env value in your code via DefinePlugin (for example: new webpack.DefinePlugin({ env: JSON.stringify(process.env.NODE_ENV) })). Then you'd be able to access env in your code directly.
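A hedged sketch of the DefinePlugin route, reusing the endpoints helper from the question (the __API__ name is only an illustration, not an established convention):
// webpack.config (excerpt)
const webpack = require('webpack');
const endpoints = require('./src/endpoints');
module.exports = {
  // ...existing context/entry/output/loaders...
  plugins: [
    new webpack.DefinePlugin({
      // replaced at compile time with the literal endpoint map
      __API__: JSON.stringify(endpoints(process.env.NODE_ENV))
    })
  ]
};
// src/services/api.js
import axios from 'axios';
export const userLogin = (username, password) =>
  axios.post(__API__.login, { username, password });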

Webpack-Dev-Server With Gulp Not Reloading After Code Change

I'm new to webpack and I'm trying to get it to work with gulp. I am using the guide found at the following link, but it doesn't seem to be working:
https://webpack.github.io/docs/usage-with-gulp.html
Can anyone tell me which part of my configuration is wrong?
gulpfile.js
import gulp from 'gulp';
import webpack from 'webpack';
import gutil from "gulp-util";
import WebpackDevServer from "webpack-dev-server";
import webpackConfig from './webpack.config';
gulp.task("dev-server", function(callback) {
// Start a webpack-dev-server
var compiler = webpack(webpackConfig);
new WebpackDevServer(compiler, {
}).listen(4000, "localhost", function(err) {
if(err) throw new gutil.PluginError("webpack-dev-server", err);
// Server listening
gutil.log("[webpack-dev-server]", "http://localhost:4000/webpack-dev-server/index.html");
// keep the server alive or continue?
// callback();
});
});
webpack.config.js
const path = require("path");
module.exports = {
  watch: true,
  entry: {
    app: __dirname + '/dev/index.js'
  },
  output: {
    path: path.join(__dirname, "dist"),
    filename: '[name].js'
  },
  module: {
    loaders: [
      {test: /\.js$/, loaders: ['babel']},
      {test: /\.scss$/, loaders: ["style", "css", "sass"]}
    ]
  }
}
There are differences between the Node.js API and the CLI for webpack-dev-server. You are using the Node.js API, so you should look here: https://webpack.js.org/guides/hot-module-replacement/#via-the-node-js-api
Try something along these lines inside the gulp task defining function:
// Add entry points for '/webpack-dev-server/client/index.js' necessary for live reloading
WebpackDevServer.addDevServerEntrypoints(webpackConfig, { ... dev-server-options ...});
// Start a webpack-dev-server
var compiler = webpack(webpackConfig);
new WebpackDevServer(compiler, {
}).listen(4000, "localhost", function(err) {
  if(err) throw new gutil.PluginError("webpack-dev-server", err);
  // Server listening
  gutil.log("[webpack-dev-server]", "http://localhost:4000/webpack-dev-server/index.html");
  // keep the server alive or continue?
  // callback();
});
Essentially add the one line WebpackDevServer.addDevServerEntrypoints(webpackConfig, { ... dev-server-options ...}); to the beginning of your task function. This will add "/webpack-dev-server/client/index.js" as an entry to your webpack config and is needed for live reloading.
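A minimal sketch of what those dev-server options might look like (the specific options are assumptions; pass the same object to both the addDevServerEntrypoints call and the server constructor so they agree):
var devServerOptions = {
  contentBase: "./dist", // serve static files from the build folder (assumed location)
  host: "localhost",
  port: 4000
};
WebpackDevServer.addDevServerEntrypoints(webpackConfig, devServerOptions);
var compiler = webpack(webpackConfig);
new WebpackDevServer(compiler, devServerOptions).listen(devServerOptions.port, devServerOptions.host, function(err) {
  if (err) throw new gutil.PluginError("webpack-dev-server", err);
  gutil.log("[webpack-dev-server]", "http://localhost:4000/webpack-dev-server/index.html");
});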
