I am using Babel with Jest for testing and instrumenting Node.js code. The problem I am facing is that Babel is somehow caching (my guess) the transformed code and doesn't pick up changes to my plugin code. The reason for my guess: if I run Jest with a new test, it shows the change, but after the first run it keeps showing the same output, even though I have changed the custom plugin code.
This is my babel.config.js:
module.exports = function (api) {
const presets = [];
const plugins = [
[require(`./babel-instrumentor.js`)],
[require("#babel/plugin-transform-modules-commonjs").default],
];
/** this is just for minimal working purposes,
* for testing larger applications it is
* advisable to cache the transpiled modules in
* node_modules/.bin/.cache/@babel/register */
api.cache(false);
return {
presets,
plugins,
};
};
And this is babel-instrumentor.js:
console.log("Loaded babel plugin");
module.exports = function ({ types: t }) {
return {
name: "log-functions",
visitor: {
Function: {
enter(path) {
let name = "anon";
if (path.node.id)
name = path.node.id.name;
console.log(path.node.body);
},
},
},
};
};
I also couldn't find any cache folder in node_modules, which is supposed to be at
node_modules/.bin/.cache/@babel/register
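(Note: the stale output described above is more likely Jest's own on-disk transform cache than anything Babel keeps; api.cache(false) only disables Babel's config caching. A minimal sketch of how to rule that out, assuming a standard Jest setup, is to disable Jest's cache:)
// jest.config.js — sketch: disable Jest's transform cache so changes to
// babel-instrumentor.js are re-applied on every run (slower, but reliable)
module.exports = {
  cache: false,
};
A one-off alternative is running npx jest --clearCache (or jest --no-cache) before re-running the tests.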
I have followed the following post in order to create a monorepo using yarn workspaces and craco.
It works really well except for one thing: the errors/warnings of the common (components) library are not emitted to the console.
The structure is very simple:
monorepo
|-packages
|-components
|-fe
fe is the main web app that uses the components library.
The fe package emits all warnings correctly; components does not.
How can I make the shared components library emit warnings/errors?
Updated:
Steps to reproduce in this repo:
https://github.com/sofoklisM/my-monorepo.git
What you need to change is the context option of the underlying ESLint Webpack plugin that is used by Create React App.
In this case I changed the context of ESLint to the root of the monorepo (yarn workspace root).
Here is an updated craco.config.js that should do the trick:
// craco.config.js
const path = require("path");
const { getLoader, loaderByName } = require("@craco/craco");
const { getPlugin, pluginByName } = require("@craco/craco/lib/webpack-plugins");
const absolutePath = path.join(__dirname, "../components");
module.exports = {
webpack: {
alias: {},
plugins: [],
configure: (webpackConfig, { env, paths }) => {
const { isFound, match } = getLoader(
webpackConfig,
loaderByName("babel-loader")
);
if (isFound) {
const include = Array.isArray(match.loader.include)
? match.loader.include
: [match.loader.include];
match.loader.include = include.concat([absolutePath]);
}
// Change context of ESLint Webpack Plugin
const { match: eslintPlugin } = getPlugin(webpackConfig, pluginByName("ESLintWebpackPlugin"));
eslintPlugin.options['context'] = path.join(__dirname, "../..");
return webpackConfig;
}
}
};
I've also made an updated fork of your reproduction repo here: https://github.com/ofhouse/stackoverflow-65447779
I'm using Webpack's [hash] for cache busting locale files. But I also need to hard-code the locale file path to load it from the browser. Since the file path is altered with [hash], I need to inject this value to get the right path.
I don't know how I can get the Webpack [hash] value programmatically in the config so I can inject it using webpack.DefinePlugin.
module.exports = (env) => {
return {
entry: 'app/main.js',
output: {
filename: '[name].[hash].js'
}
...
plugins: [
new webpack.DefinePlugin({
HASH: ***???***
})
]
}
}
In case you want to dump the hash to a file and load it in your server's code, you can define the following plugin in your webpack.config.js:
const fs = require('fs');
class MetaInfoPlugin {
constructor(options) {
this.options = { filename: 'meta.json', ...options };
}
apply(compiler) {
// tapPromise (rather than tap) makes webpack wait for the async file write;
// a promise returned from a plain tap callback would be ignored
compiler.hooks.done.tapPromise(this.constructor.name, stats => {
const metaInfo = {
// add any other information if necessary
hash: stats.hash
};
const json = JSON.stringify(metaInfo);
return new Promise((resolve, reject) => {
fs.writeFile(this.options.filename, json, 'utf8', error => {
if (error) {
reject(error);
return;
}
resolve();
});
});
});
}
}
module.exports = {
// ... your webpack config ...
plugins: [
// ... other plugins ...
new MetaInfoPlugin({ filename: 'dist/meta.json' }),
]
};
Example content of the output meta.json file:
{"hash":"64347f3b32969e10d80c"}
I've just created a dumpmeta-webpack-plugin package for this plugin. So you might use it instead:
const { DumpMetaPlugin } = require('dumpmeta-webpack-plugin');
module.exports = {
...
plugins: [
...
new DumpMetaPlugin({
filename: 'dist/meta.json',
prepare: stats => ({
// add any other information you need to dump
hash: stats.hash,
})
}),
]
}
Please refer to the Webpack documentation for all available properties of the Stats object.
Seems like it should be a basic feature but apparently it's not that simple to do.
You can accomplish what you want by using wrapper-webpack-plugin.
plugins: [
new WrapperPlugin({
header: '(function (BUILD_HASH) {',
footer: function (fileName) {
const rx = /^.+?\.([a-z0-9]+)\.js$/;
const hash = fileName.match(rx)[1];
return `})('${hash}');`;
},
})
]
A bit hacky but it works, if you don't mind the entire chunk being wrapped in an anonymous function.
Alternatively you can just add var BUILD_HASH = ... in the header option (a sketch follows below), though that could cause problems if it becomes a global.
I created this plugin a while back, I'll try to update it so it provides the chunk hash naturally.
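For reference, a sketch of that header-only variant; it assumes header accepts the same function form as footer (check the plugin's docs), and note the BUILD_HASH global caveat above:
plugins: [
  new WrapperPlugin({
    header: function (fileName) {
      // extract the hash from the output file name, as in the footer example
      const hash = fileName.match(/^.+?\.([a-z0-9]+)\.js$/)[1];
      return `var BUILD_HASH = '${hash}';`; // becomes a global
    },
  })
]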
On the server, you can get the hash by reading the filenames (example: web.bundle.f4771c44ee57573fabde.js) from your bundle folder.
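A minimal sketch of that approach, assuming a single bundle named like web.bundle.<hash>.js in a dist folder (folder and naming pattern are assumptions, adjust to your output config):
const fs = require('fs');
const path = require('path');
// scan the bundle folder and pull the hash out of the bundle's file name
const files = fs.readdirSync(path.join(__dirname, 'dist'));
const match = files
  .map(name => name.match(/^web\.bundle\.([a-f0-9]+)\.js$/))
  .find(Boolean);
const hash = match && match[1];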
You can pass the version to your build using webpack.DefinePlugin
If you have a package.json with a version, you can extract it like this:
const version = require("./package.json").version;
For example (we stringified the version):
new webpack.DefinePlugin({
'process.env.VERSION': JSON.stringify(version)
}),
then in your javascript, the version will be available as:
process.env.VERSION
The WebpackManifestPlugin is officially recommended in the output management guide. It writes a JSON to the output directory mapping the input filenames to the output filenames. Then you can inject those mapped values into your server template.
It's similar to Dmitry's answer, except Dmitry's doesn't appear to support multiple chunks.
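A minimal sketch, assuming webpack-manifest-plugin v3+ (where the plugin is a named export):
const { WebpackManifestPlugin } = require('webpack-manifest-plugin');
module.exports = {
  // ... your webpack config ...
  plugins: [
    // writes manifest.json to the output directory, mapping source names
    // to hashed output names, e.g. "main.js": "main.64347f3b.js"
    new WebpackManifestPlugin(),
  ],
};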
That can be done with the Webpack Stats Plugin. It gives you a nice and neat output file with all the data you want, and it's easy to incorporate into the webpack config files where needed.
E.g. to get the hash generated by Webpack and use it elsewhere, it could be achieved like this:
# installation
npm install --save-dev webpack-stats-plugin
yarn add --dev webpack-stats-plugin
# generating stats file
const { StatsWriterPlugin } = require("webpack-stats-plugin")
module.exports = {
plugins: [
// Everything else **first**.
// Write out stats file to build directory.
new StatsWriterPlugin({
stats: {
all: false,
hash: true,
},
filename: "stats.json" // Default and goes straight to your output folder
})
]
}
# usage
const stats = require("YOUR_PATH_TO/stats.json");
console.log("Webpack's hash is - ", stats.hash);
More usage examples in their repo
Hope that helps!
The tl;dr is:
1) How can I have Jest use the native require function to load all modules in my tests anywhere?
2) Where / how would I go about modifying the require function (i.e. replacing it with the esm loader, https://github.com/standard-things/esm) in one place, before any tests run, so all tests will use the modified require?
I'd like to use the esm-loader with my Jest test files. In order to do so, I need to patch the require function globally, before any test code runs, with something like
require = require("#std/esm")(module, { esm: "js", cjs: true });
How do I tell Jest to execute that code before anything else is touched or requested?
I tried pointing both setupTestFrameworkScriptFile and an setupFiles array entry to a file with that in it, but neither worked (though I did confirm that both ran).
Alternatively, I'm firing off these tests with an npm script
"scripts": {
"test": "jest"
}
Is there some CLI magic whereby I can just load a module and then run jest?
Edit - the testEnvironment and resolver options make me wonder if this is ever even using the actual Node require function to load modules, or instead using its own module loader. If so, I wonder if this is even possible.
So this one was a bit tough to get working. The solution is quite simple, but it took me a while to figure out. The problem is that whichever kind of module Jest loads:
Setup Files
Setup Framework Files
Test Files
Module files
they are all loaded in the following way:
({"Object.<anonymous>":function(module,exports,require,__dirname,__filename,global,jest){/*Module code inside*/
}});
If you have a look at node_modules/jest-runtime/build/index.js, lines 495-510:
const dirname = (_path || _load_path()).default.dirname(filename);
localModule.children = [];
localModule.parent = mockParentModule;
localModule.paths = this._resolver.getModulePaths(dirname);
localModule.require = this._createRequireImplementation(filename, options);
const transformedFile = this._scriptTransformer.transform(
filename,
{
collectCoverage: this._coverageOptions.collectCoverage,
collectCoverageFrom: this._coverageOptions.collectCoverageFrom,
collectCoverageOnlyFrom: this._coverageOptions.collectCoverageOnlyFrom,
isInternalModule,
mapCoverage: this._coverageOptions.mapCoverage },
this._cacheFS[filename]);
this._createRequireImplementation(filename, options); gives every module a custom require function. So you never get the native require function at all, anywhere. Once Jest has started, every module loaded from then on will have Jest's custom require function.
When we load a module, the requireModule method from jest-runtime gets called. Below is an excerpt from it:
moduleRegistry[modulePath] = localModule;
if ((_path || _load_path()).default.extname(modulePath) === '.json') {
localModule.exports = this._environment.global.JSON.parse(
(0, (_stripBom || _load_stripBom()).default)((_gracefulFs || _load_gracefulFs()).default.readFileSync(modulePath, 'utf8')));
} else if ((_path || _load_path()).default.extname(modulePath) === '.node') {
// $FlowFixMe
localModule.exports = require(modulePath);
} else {
this._execModule(localModule, options);
}
As you can see, if the file extension is .node it loads the module directly; otherwise it calls _execModule. This function contains the same code I posted earlier, which does the code transformation:
const isInternalModule = !!(options && options.isInternalModule);
const filename = localModule.filename;
const lastExecutingModulePath = this._currentlyExecutingModulePath;
this._currentlyExecutingModulePath = filename;
const origCurrExecutingManualMock = this._isCurrentlyExecutingManualMock;
this._isCurrentlyExecutingManualMock = filename;
const dirname = (_path || _load_path()).default.dirname(filename);
localModule.children = [];
localModule.parent = mockParentModule;
localModule.paths = this._resolver.getModulePaths(dirname);
localModule.require = this._createRequireImplementation(filename, options);
Now when we want to modify the require function for our tests, we need _execModule to export our code directly. So the code should be similar to the loading of .node modules:
} else if ((_path || _load_path()).default.extname(modulePath) === '.mjs') {
// $FlowFixMe
require = require("#std/esm")(localModule);
localModule.exports = require(modulePath);
} else {
But doing that would mean patching the code, which we want to avoid. So what we do instead is avoid using the jest command directly: we create our own jestload.js and run that. The code for loading Jest is simple:
#!/usr/bin/env node
/**
* Copyright (c) 2014-present, Facebook, Inc. All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
cli = require('jest/bin/jest');
Now we want to modify _execModule before the CLI loads, so we add the code below:
const jestRuntime = require("jest-runtime");
const oldexecModule = jestRuntime.prototype._execModule;
jestRuntime.prototype._execModule = function (localModule, options) {
if (localModule.id.indexOf(".mjs") > 0) {
localModule.exports = require("#std/esm")(localModule)(localModule.id);
return localModule;
}
return oldexecModule.apply(this, [localModule, options]);
};
cli = require('jest/bin/jest');
Now time for a test
//__test__/sum.test.js
const sum = require('../sum.mjs').sum;
test('adds 1 + 2 to equal 3', () => {
expect(sum(1, 2)).toBe(3);
});
test('adds 2 + 3 to equal 5', () => {
expect(sum(3, 2)).toBe(5);
});
And a sum.mjs file
export function sum (x, y) { return x + y }
Now we run the test
The solution is available in the repo below:
https://github.com/tarunlalwani/jest-overriding-require-function-stackoverflow
You can clone and test the solution by running npm test.
setupFiles worked for me. Add this to package.json:
"jest": {
"setupFiles": ["./my_file.js"]
},
https://jestjs.io/docs/en/configuration.html#setupfiles-array
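For completeness, a sketch of what ./my_file.js could contain here, reusing the snippet from the question (whether the patched require actually reaches your modules depends on Jest's own require wrapper, as described in the accepted answer):
// ./my_file.js — runs before the test framework is installed
require = require("@std/esm")(module, { esm: "js", cjs: true });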
I tried using node -r @std/esm run.js, where run.js is just a script that calls jest, but it does not work and crashes here: https://github.com/facebook/jest/blob/master/packages/jest-runtime/src/script_transformer.js#L305.
From what I understand, this line means it is not possible, because Jest compiles modules using the native vm module. The lines above (line 290):
if (willTransform) {
const transformedSource = this.transformSource(
filename,
content,
instrument,
!!(options && options.mapCoverage));
wrappedCode = wrap(transformedSource.code);
sourceMapPath = transformedSource.sourceMapPath;
} else {
is the code called when you are specifying transforms in your jest config.
Conclusion: until ES modules are supported (and they will be, under the .mjs extension), you cannot import ES modules in Jest without specifying a transform. You could try to monkey-patch vm, but I would really advise against that option.
Specifying a Jest transform is really not that hard, and for ES modules it's really as simple as using babel-jest with the right Babel config.
Below is a package.json with minimal settings:
{
"dependencies": {
"babel-jest": "^21.2.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.0",
"jest": "^21.2.1"
},
"jest": {
"testMatch": [
"<rootDir>/src/**/__tests__/**/*.js?(x)",
"<rootDir>/src/**/?(*.)(spec|test).js?(x)"
],
"transform": {
"^.+\\.(js|jsx)$": "<rootDir>/node_modules/babel-jest"
},
"testEnvironment": "node",
"testURL": "http://localhost",
"moduleFileExtensions": [
"js",
"json"
]
},
"babel": {
"plugins": ["babel-plugin-transform-es2015-modules-commonjs"]
}
}
I'm using webpack to bundle server assets using the target property.
This results in a usable client bundle, and a usable server, which is working great. However, it seems that even for the server code, webpack is bundling everything within node_modules. I am attempting to use webpack-node-externals to solve this problem, as seen below:
const nodeExternals = require("webpack-node-externals");
module.exports = [
{
name: "server code, output to ./server",
entry: "./servertest.js",
output: {
filename: "./server/index.js"
},
target: "node",
externals: [
nodeExternals({
includeClientPackages: false
})
]
},
{
name: "client side, output to ./public",
entry: "./app.js",
output: {
filename: "./dist/app.js"
}
}
]
This doesn't work, however, as its default behavior is to exclude all of node_modules from bundling, thus rendering the server useless. There is a whitelist option, to which I have added express, the only dependency of my small test case. It doesn't fail on express, but it does fail on a dependency of express, merge-descriptors. And of course if I add merge-descriptors to the whitelist, starting the server fails on another dependency of express. I surely cannot add every dependency and sub-dependency (and so on) to this whitelist array.
How can I ensure all dependencies of a given requirement are bundled by webpack during a target: 'node' build?
To deal with this, I created a small helper, datwd, to get a list of all subdependencies for specific packages:
// webpack.config.js
const nodeExternals = require('webpack-node-externals')
const includeSubdependencies = require('datwd')
module.exports = {
// ...
externals: [
nodeExternals({
// Will include "cookies" and its dependencies; for example:
// `['cookies', 'depd', 'keygrip', 'tsscmp']`
allowlist: includeSubdependencies(['cookies'])
})
]
}
It relies on npm ls under the hood. Source code:
/* eslint-disable global-require, import/no-dynamic-require */
const { execSync } = require('child_process')
/**
* Returns a flat array of all Node module dependency names for the entire
* dependency tree, optionally filtered by top-level modules. Requires NPM
* and for dependencies to be installed.
*
* @param {Array} moduleFilterInput - An optional array of top-level module names
* whose dependencies should be included. If specified, any other modules'
* dependencies will be excluded.
* @return {String[]} An array of module names
*/
const getAllDependencies = (moduleFilterInput = []) => {
const moduleFilter = Array.isArray(moduleFilterInput)
? moduleFilterInput
: [moduleFilterInput]
// Get the full dependency tree using NPM, excluding dev dependencies
// and peer dependencies.
const dependencyTree = JSON.parse(execSync('npm ls --prod --json').toString())
// Only get dependencies for specific top-level modules, if specified.
const dependencyTreeFiltered = moduleFilter.length
? {
...dependencyTree,
dependencies: Object.keys(dependencyTree.dependencies)
.filter((key) => moduleFilter.includes(key))
.reduce((obj, key) => {
// eslint-disable-next-line no-param-reassign
obj[key] = dependencyTree.dependencies[key]
return obj
}, {}),
}
: dependencyTree
const allChildDeps = []
const getAllChildDependencies = (depTree) => {
const nextDeps = depTree.dependencies
if (!nextDeps || !Object.keys(nextDeps).length) {
return []
}
Object.entries(nextDeps).forEach(([childDep, childDepTree]) => {
allChildDeps.push(childDep)
getAllChildDependencies(childDepTree)
})
return allChildDeps
}
return getAllChildDependencies(dependencyTreeFiltered)
}
// export for use from webpack.config.js
module.exports = getAllDependencies
I have been trying to get this to work, but maybe I'm missing something. I am using ng-constant to set up different environment endpoints, as mentioned in the ng-constants issue.
However I am using gulp and the configuration looks like
gulp.task('environmentsapi', function () {
return ngConstant({
stream: true,
development: {
constants: {
"ENV": {"api": "http://1.1.1.1:8082/"}
}
},
production: {
constants: {
"ENV": {"api": "https://productionapplink/"}
}
}
})
// Writes config.js to dist/ folder
.pipe(gulp.dest('dist/scripts/config'));
});
I can't figure out how to call the different endpoints in the different gulp tasks, like the ngconstant:development example in the link. How can I run this within the environmentsapi task, since it is shared across all environment builds? Please let me know how to do this.
gulp.task('build', function () {
runSequence('clean', ['sass', 'scripts', 'bower_components', 'environmentsapi' /* How can I run ngconstant:development here? */], 'wiredep')
});
Simply create new tasks that set flags!
Here I'm using the development flag that defaults to true.
var development = true;
gulp.task('prod', function () {
development = false;
});
gulp.task('environmentsapi', function () {
const apiEndpoint = development ? 'http://1.1.1.1:8082/' : 'https://productionapplink/';
return ngConstant({
stream: true,
constants: {
'ENV': {api: apiEndpoint}
}
});
});
Now, using gulp build will build your application with the ENV.api set to 'http://1.1.1.1:8082/', your development endpoint.
And calling gulp prod build will make your output use an ENV.api set to 'https://productionapplink/'.
As discussed in the comments section, the solution above works perfectly when you only have two environments, but it quickly gets out of hand as the number of environments grows.
In that case, I suggest using a different approach, the Pirate way, using yargs.
Here would be your new gulpfile.js:
const argv = require('yargs').argv;
const endpoints = {
'dev': 'http://1.1.1.1:8082/',
'prod-org': 'https://productionapplink.org/',
'prod-com': 'https://productionapplink.com/',
'prod-gov': 'https://productionapplink.gov/'
};
gulp.task('environmentsapi', function () {
const apiEndpoint = typeof argv.env === 'undefined' ? endpoints.dev : endpoints[argv.env];
return ngConstant({
stream: true,
constants: {
ENV: { api: apiEndpoint }
}
}).pipe(gulp.dest('dist/scripts/config'));
});
Use it like follows:
gulp build uses the default api URL: 'http://1.1.1.1:8082/'
gulp build --env=prod-org uses 'https://productionapplink.org/'
gulp build --env=prod-com uses 'https://productionapplink.com/'
I hope this could work for you this time!