Wallaby Does Not Pass Dotenv Variables to Runner

I am trying to use Wallaby in conjunction with the dotenv-flow package. I currently have my wallaby.js config file set up like below:
require("dotenv-flow").config()
module.exports = function (wallaby) {
return {
files: [
'api/*',
'controllers/*',
'config/*',
'firebase/*',
'helpers/*',
'models/*',
'services/*',
'smtp/*',
'sockets/*'
],
tests: [
"test/**/*.test.mjs"
],
testFramework: "mocha",
env: {
type: "node",
params: {
env: "NODE_ENV=test"
}
}
};
};
I've tried a few other ways of writing the file, including in ESM format. However, when my tests run, my Sequelize code complains that it wasn't passed the environment variables it needs to connect to the development DB.

You are loading your .env file but then never using its contents. Another problem is that Wallaby doesn't understand the dotenv output directly, so you have to massage it a little bit: env.params.env expects a single string of KEY=value pairs joined by semicolons.
const environment = Object.entries(require("dotenv-flow").config().parsed)
  .map(([key, value]) => `${key}=${value}`)
  .join(';');
Then change your env section to something like this:
env: {
  runner: 'node',
  params: {
    env: environment
  }
}
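Putting the two pieces together, the whole wallaby.js might look like this (a sketch; keeping NODE_ENV=test by prepending it to the joined string is my assumption):
// wallaby.js: sketch combining the two snippets above
const environment = Object.entries(require("dotenv-flow").config().parsed)
  .map(([key, value]) => `${key}=${value}`)
  .join(';');

module.exports = function (wallaby) {
  return {
    files: ['api/*', 'controllers/*' /* ...the rest of the globs above... */],
    tests: ["test/**/*.test.mjs"],
    testFramework: "mocha",
    env: {
      runner: 'node',
      params: {
        // Assumption: NODE_ENV=test is still wanted, so prepend it to the list
        env: `NODE_ENV=test;${environment}`
      }
    }
  };
};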


Run pm2 with ES modules

How can I use pm2 in combination with a package based on ES modules (type: "module")?
I looked into similar questions without finding any useful help (some say it does not work on Windows, but I am using Linux).
I always receive the error:
Error [ERR_REQUIRE_ESM]: require() of ES Module /opt/app/server/lib/src/index.js not supported.
0|any| Instead change the require of index.js in null to a dynamic import() which is available in all CommonJS modules.
My ecosystem.config.js looks like:
const os = require('os');

module.exports = {
  apps: [{
    port      : 3000,
    name      : "any",
    script    : "lib/src/index.js",
    watch     : true,
    instances : os.cpus().length,
    exec_mode : 'fork',
    env: {
      NODE_ENV: "production",
    }
  }]
}
index.js is an ES module using import syntax. How can I tell pm2 that it should use this way of importing?
To achieve this you can create an intermediary CommonJS module which loads your application from the ES module. This is possible because the dynamic import() function is available in CommonJS modules. Here is how that might look:
ecosystem.config.js
lib/src/index.cjs CommonJS entry point (for PM2).
lib/src/index.js ESModule entry point (for ESM-compatible tools).
lib/src/app.js Application code.
ecosystem.config.js:
const os = require('os');

module.exports = {
  apps: [{
    port      : 3000,
    name      : "any",
    script    : "lib/src/index.cjs", // 👈 CommonJS
    watch     : true,
    instances : os.cpus().length,
    exec_mode : 'fork',
    env: {
      NODE_ENV: "production",
    }
  }]
}
lib/src/index.js:
import {app} from './app.js'
app()
lib/src/index.cjs:
import('./app.js') // 👈 dynamic import() is available in CommonJS
  .then(({ app }) => {
    app()
  })
lib/src/app.js:
import {hello} from './greet.js'
export function app() {
  console.log(hello('World'))
}
lib/src/greet.js:
export function hello(name) {
  return `Hello, ${name}!`
}
Renaming ecosystem.config.js to ecosystem.config.cjs worked for me. (With "type": "module" in package.json, every .js file is treated as ESM, so pm2 cannot require() the config; the .cjs extension forces CommonJS for that one file.)

How to get babel-node to convert files referenced by Workers to commonjs? (like babel does)

In an Express.js app, I'm using Babel to precompile to CommonJS before starting it. The compilation step looks like this:
babel ./src --out-dir dist
node ./dist/bin
As part of the project I have a file called my-worker.js where I use import syntax:
// my-worker.js
import { parentPort, workerData } from 'worker_threads'
import axios from 'axios'
// ...
And that is used by other-file.js:
// other-file.js
// ...
const worker = new Worker(__dirname + '/my-worker.js', { workerData: ... })
// ...
This works fine. Babel converts all the files to commonjs, and loading the worker script works.
BUT
When I use @babel/node, this doesn't work:
babel-node ./src/bin
I get the warning:
(node:4865) Warning: To load an ES module, set "type": "module" in the package.json or use the .mjs extension.
Along with the error:
Cannot use import statement outside a module
I don't want to use "type": "module", since then I have to explicitly name file extensions, and I'm also not sure that the import X, { y } from ... syntax (which I like) is supported.
If I change my worker file to be my-worker.mjs and change the new Worker statement accordingly, then that works with @babel/node, but not with my production build, since filenames are changed back to .js.
How can I get @babel/node to load and cache (I guess this is what it needs to do?) files loaded by a Worker? Why does this work with the babel CLI and not with @babel/node?
My .babelrc file looks like this:
{
  "presets": [
    [
      "@babel/preset-env",
      {
        "useBuiltIns": "usage",
        "corejs": 3,
        "targets": {
          "node": "13"
        },
        "modules": "commonjs"
      }
    ]
  ]
}
The @babel/register API can help dynamically transpile a script source, as pointed out in https://github.com/babel/babel/issues/10972#issuecomment-572608142. You can use this approach with the Worker eval mode to make a single-file script. This might be useful if you use babel-node to run a command-line utility script.
import { isMainThread, Worker, workerData } from "worker_threads";

function createTranspiledWorker(filename, options) {
  // Spawn the worker from an inline script that registers the Babel hook,
  // then requires the real file so it gets transpiled on load.
  const transpile = `
    require('@babel/register');
    require(${JSON.stringify(filename)});
  `;
  return new Worker(transpile, { ...options, eval: true });
}

async function main() {
  const w = createTranspiledWorker(__filename, { workerData: { hello: "world" } });
  const exit = new Promise(resolve => w.on("exit", resolve));
  await exit;
}

function worker() {
  console.log("worker", workerData);
}

if (isMainThread) {
  main();
} else {
  worker();
}

How to inject the Webpack build hash into application code

I'm using Webpack's [hash] for cache busting locale files. But I also need to hard-code the locale file path to load it from the browser. Since the file path is altered with [hash], I need to inject this value to get the right path.
I don't know how to get the Webpack [hash] value programmatically in the config so I can inject it using webpack.DefinePlugin.
module.exports = (env) => {
  return {
    entry: 'app/main.js',
    output: {
      filename: '[name].[hash].js'
    },
    // ...
    plugins: [
      new webpack.DefinePlugin({
        HASH: ??? // <-- how do I get this value?
      })
    ]
  }
}
In case you want to dump the hash to a file and load it in your server's code, you can define the following plugin in your webpack.config.js:
const fs = require('fs');

class MetaInfoPlugin {
  constructor(options) {
    this.options = { filename: 'meta.json', ...options };
  }

  apply(compiler) {
    // tapPromise (rather than tap) so webpack waits for the file write to finish
    compiler.hooks.done.tapPromise(this.constructor.name, stats => {
      const metaInfo = {
        // add any other information if necessary
        hash: stats.hash
      };
      const json = JSON.stringify(metaInfo);
      return new Promise((resolve, reject) => {
        fs.writeFile(this.options.filename, json, 'utf8', error => {
          if (error) {
            reject(error);
            return;
          }
          resolve();
        });
      });
    });
  }
}

module.exports = {
  // ... your webpack config ...
  plugins: [
    // ... other plugins ...
    new MetaInfoPlugin({ filename: 'dist/meta.json' }),
  ]
};
Example content of the output meta.json file:
{"hash":"64347f3b32969e10d80c"}
I've just created a dumpmeta-webpack-plugin package from this plugin, so you might use it instead:
const { DumpMetaPlugin } = require('dumpmeta-webpack-plugin');

module.exports = {
  // ...
  plugins: [
    // ...
    new DumpMetaPlugin({
      filename: 'dist/meta.json',
      prepare: stats => ({
        // add any other information you need to dump
        hash: stats.hash,
      })
    }),
  ]
}
Please refer to the Webpack documentation for all available properties of the Stats object.
Seems like it should be a basic feature but apparently it's not that simple to do.
You can accomplish what you want by using wrapper-webpack-plugin.
plugins: [
  new WrapperPlugin({
    header: '(function (BUILD_HASH) {',
    footer: function (fileName) {
      const rx = /^.+?\.([a-z0-9]+)\.js$/;
      const hash = fileName.match(rx)[1];
      return `})('${hash}');`;
    },
  })
]
A bit hacky, but it works, if you don't mind the entire chunk being wrapped in an anonymous function.
Alternatively you can just add var BUILD_HASH = ... in the header option, though that could cause problems if it becomes a global.
I created this plugin a while back, I'll try to update it so it provides the chunk hash naturally.
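For example, reusing the same filename-parsing trick as the footer above (my sketch, not from the plugin's docs):
new WrapperPlugin({
  // Hypothetical variant: emit the hash as a top-of-file variable
  // instead of wrapping the chunk in a function.
  header: function (fileName) {
    const hash = fileName.match(/^.+?\.([a-z0-9]+)\.js$/)[1];
    return `var BUILD_HASH = '${hash}';\n`;
  }
})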
On the server, you can get the hash by reading the filenames (example: web.bundle.f4771c44ee57573fabde.js) from your bundle folder.
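A sketch of that idea, assuming the bundle is emitted as web.bundle.<hash>.js into a dist folder (adjust the pattern to your output.filename):
const fs = require('fs');

// Find the emitted bundle and pull the hash out of its filename.
const bundleName = fs.readdirSync('./dist')
  .find(name => /^web\.bundle\.[a-f0-9]+\.js$/.test(name));
const hash = bundleName && bundleName.match(/^web\.bundle\.([a-f0-9]+)\.js$/)[1];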
You can pass the version to your build using webpack.DefinePlugin.
If you have a package.json with a version, you can extract it like this:
const version = require("./package.json").version;
For example (we stringified the version):
new webpack.DefinePlugin({
  'process.env.VERSION': JSON.stringify(version)
}),
Then in your JavaScript, the version will be available as:
process.env.VERSION
The WebpackManifestPlugin is officially recommended in the output management guide. It writes a JSON to the output directory mapping the input filenames to the output filenames. Then you can inject those mapped values into your server template.
It's similar to Dmitry's answer, except Dmitry's doesn't appear to support multiple chunks.
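A minimal setup might look like this (a sketch assuming webpack-manifest-plugin v3+, which exposes WebpackManifestPlugin as a named export; older versions export the plugin directly):
const { WebpackManifestPlugin } = require('webpack-manifest-plugin');

module.exports = {
  // ... your webpack config ...
  plugins: [
    // Writes manifest.json mapping e.g. "main.js" -> "main.64347f3b.js"
    new WebpackManifestPlugin({ fileName: 'manifest.json' })
  ]
};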
That can be done with the Webpack Stats Plugin. It gives you a nice and neat output file with all the data you want, and it's easy to incorporate into your webpack config files where needed. For example, to get the hash generated by Webpack and use it elsewhere:
# installation
npm install --save-dev webpack-stats-plugin
yarn add --dev webpack-stats-plugin
// generating the stats file (webpack.config.js)
const { StatsWriterPlugin } = require("webpack-stats-plugin")

module.exports = {
  plugins: [
    // Everything else **first**.
    // Write out stats file to build directory.
    new StatsWriterPlugin({
      stats: {
        all: false,
        hash: true,
      },
      filename: "stats.json" // Default and goes straight to your output folder
    })
  ]
}
// usage
const stats = require("YOUR_PATH_TO/stats.json");
console.log("Webpack's hash is - ", stats.hash);
More usage examples are in their repo. Hope that helps!

Packaging code for AWS Lambda

I am trying to package code for AWS Lambda. Lambda has various restrictions, such as running Node 6.10 and having no build step (unlike AWS EB). I am also using NPM modules, so these will need to be bundled with the AWS Lambda handler.
Here is what I would like to do:
Define and use NPM modules (pure JS modules only)
Transpile all code (including NPM modules) to a JS version that Node 6.10 supports
Statically link all NPM modules into one big JS file
Upload that single file to AWS Lambda
For example, suppose I have an NPM module foo (node_modules/foo/index.js):
export default { x: 1 };
and I have my own code ('index.js'):
import foo from 'foo';
export const handler = (event, context, callback) => {
  console.log(foo); // Will appear in CloudWatch logs
  callback(null, 'OK');
};
The output would be something like this ('dist/bundle.js'):
var foo = { x: 1 };
exports.handler = function(event, context, callback) {
  console.log(foo);
  callback(null, 'OK');
};
I should be able to upload and run bundle.js on AWS Lambda without further modification.
How can I achieve this using existing JS tools?
You can use serverless with serverless-webpack, then deploy your bundle with serverless deploy.
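For a rough idea, a config sketch in the serverless.js format (the service name, runtime, and handler path are placeholders):
// serverless.js: minimal sketch, names and paths are placeholders
module.exports = {
  service: 'my-service',
  provider: {
    name: 'aws',
    runtime: 'nodejs6.10'
  },
  plugins: ['serverless-webpack'], // bundles the handler with webpack on deploy
  functions: {
    hello: {
      handler: 'index.handler'
    }
  }
};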
It turns out that this is possible, but it requires some tricky configuration to achieve. I have created a boilerplate repo for others to use.
Here are the important bits...
First, you need a .babelrc that targets Node.js 6.10:
{
  "presets": [
    [
      "env", {
        "targets": {
          "node": "6.10"
        },
        "loose": false,
        "spec": true
      }
    ]
  ]
}
Next, you need to configure Webpack to generate a commonjs library targeting node:
const path = require('path');
const webpack = require('webpack');

const debug = process.env.NODE_ENV !== 'production';

module.exports = {
  context: __dirname,
  entry: ['babel-polyfill', './index.js'],
  output: {
    path: path.join(__dirname, 'out'),
    filename: 'index.js',
    libraryTarget: 'commonjs'
  },
  devtool: debug ? 'source-map' : false,
  module: {
    rules: [
      {
        test: /\.js$/,
        use: {
          loader: 'babel-loader',
          options: {
            babelrc: true,
            compact: !debug
          }
        }
      }
    ],
  },
  target: 'node',
  plugins: [
    new webpack.DefinePlugin({ 'global.GENTLY': false })
  ]
};
Note that you do not want to exclude the node_modules folder, since that would prevent static linking. The babel-polyfill entry is also crucial if you want to use modern JS features.
Your actual handler code should have a named export that matches what you have set in the AWS console:
export const handler = (event, context, callback) => callback(null, 'OK');
Do not do it like this!
// Bad!
export default {
  handler: (event, context, callback) => callback(null, 'OK'),
};
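(This is because Babel's CommonJS transform turns an export default into exports.default, so the exports.handler that Lambda looks up by name never exists.)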
When packaging the code, make sure you add index.js to the top level of the zip:
zip -j bundle.zip ./out/index.js

Gulp task for ng-constant multiple environments

I have been trying to get this to work, but maybe I'm missing something. I am using ng-constant and setting up different environment endpoints, as mentioned in the ng-constant issue. However, I am using gulp, and the configuration looks like this:
gulp.task('environmentsapi', function () {
  return ngConstant({
    stream: true,
    development: {
      constants: {
        "ENV": {"api": "http://1.1.1.1:8082/"}
      }
    },
    production: {
      constants: {
        "ENV": {"api": "https://productionapplink/"}
      }
    }
  })
  // Writes config.js to dist/ folder
  .pipe(gulp.dest('dist/scripts/config'));
});
I can't figure out how to call the different endpoints in the different gulp tasks, like ngconstant:development etc. in the linked example. How can I run this within the environmentsapi task, since it is shared across all environment builds? Please let me know how to do this.
gulp.task('build', function () {
  runSequence('clean', ['sass', 'scripts', 'bower_components', 'environmentsapi' /* How can I run ngconstant:development here? */], 'wiredep')
});
Simply create new tasks that set flags!
Here I'm using a development flag that defaults to true.
var development = true;

gulp.task('prod', function () {
  development = false;
});

gulp.task('environmentsapi', function () {
  const apiEndpoint = development ? 'http://1.1.1.1:8082/' : 'https://productionapplink/';
  return ngConstant({
    stream: true,
    constants: {
      'ENV': {api: apiEndpoint}
    }
  });
});
Now, using gulp build will build your application with the ENV.api set to 'http://1.1.1.1:8082/', your development endpoint.
And calling gulp prod build will make your output use an ENV.api set to 'https://productionapplink/'.
As discussed in the comments section, the solution above works well when you only have two environments, but it quickly gets out of hand as the number of environments grows.
In that case, I suggest using a different approach, the Pirate way, using yargs.
Here would be your new gulpfile.js:
const argv = require('yargs').argv;
const endpoints = {
'dev': 'http://1.1.1.1:8082/',
'prod-org': 'https://productionapplink.org/',
'prod-com': 'https://productionapplink.com/',
'prod-gov': 'https://productionapplink.gov/'
};
gulp.task('enviornmentsapi', function () {
const apiEnpdoint = typeof argv.env === 'undefined' ? endpoints.dev : endpoints[argv.env];
return ngConstant({
stream: true,
constants: {
ENV: { api: apiEnpdoint }
}
}).pipe(gulp.dest('dist/scripts/config'));
});
Use it as follows:
gulp build uses the default api URL: 'http://1.1.1.1:8082/'
gulp build --env=prod-org uses 'https://productionapplink.org/'
gulp build --env=prod-com uses 'https://productionapplink.com/'
I hope this could work for you this time!
