So, I have a directory:
mods/
  core/
    index.js
    scripts/
      (lots of stuff imported by core/index)
This works in typical Rollup fashion if you want to bundle to, for example, mods/core/index.min.js.
But I have many of these mods/**/ directories, and I want to take advantage of the fact that they are rolled up into IIFEs. Each mods/**/index.js will, rather than export, assign to a global variable that we presume is provided:
mods/core/index.js
import ui from './scripts/ui/'
global.ui = ui
mods/someMod/scripts/moddedClass.js
export default class moddedClass extends global.ui.something { /* some functionality extension */}
mods/someMod/index.js
import moddedClass from './scripts/moddedClass'
global.ui.something = moddedClass
So hopefully you can see how each mod directory can be rolled up in typical fashion, but I then need to put the resulting IIFEs inside another one, so that:
mods/compiled.js
(function compiled() {
const global = {};
(function core() {
//typical rollup iife
})();
(function someMod() {
//typical rollup iife
})();
//a footer like return global or global.init()
})();
Any help towards this end would be greatly appreciated. The simplest possible answer, I think, is how I can get each mod's IIFE as a string value instead of having Rollup write it to a file.
At that point I could just iterate the mods/ directory, in an order specified by some modlist.json or the like, call Rollup on each mod's index.js, and then build the outer IIFE myself from strings.
However, I suppose this would not be a full solution for source mapping? Or can multiple inline sourcemaps be included? With source mapping in mind, I wonder if another build step might be necessary, where each mod is transpiled before this system even gets to it.
Use Rollup's bundle.generate API to generate each IIFE and write them into one file using fs.appendFile.
For the sourcemaps you can use sorcery (it's from the same author as Rollup): https://github.com/rich-harris/sorcery
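A rough sketch of that approach (this assumes the pre-1.0 Rollup API used in the accepted code further down, where bundle.generate() returns { code, map } synchronously; the mod list, output path, and helper names here are made up for illustration):

const rollup = require("rollup")
const fs = require("fs")
const path = require("path")

const modNames = ["core", "someMod"] // e.g. read from modlist.json
const outFile = path.join(__dirname, "mods", "compiled.js")

// open the outer IIFE and the shared `global` object
fs.writeFileSync(outFile, "(function () {\nconst global = {};\n")

const appendNext = i => {
  if (i >= modNames.length) {
    fs.appendFileSync(outFile, "\n})();\n") // close the outer IIFE
    return
  }
  rollup.rollup({ entry: path.join(__dirname, "mods", modNames[i], "index.js") })
    .then(bundle => {
      // generate() hands back the IIFE as a string instead of writing a file
      const { code } = bundle.generate({ format: "iife", exports: "none" })
      fs.appendFileSync(outFile, "\n" + code)
      appendNext(i + 1)
    })
}

appendNext(0)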
Okay, so the way I ended up solving this was with source-map-concat.
It basically does what I described, right out of the box. The only thing I had to do was iterate the mod directory asynchronously and roll up each mod before passing the results to source-map-concat, since rollup.rollup returns a Promise.
I also ended up wanting inline sourcemaps, so that the code can be injected directly rather than written to a file, so I used convert-source-map for that.
The only issue left to solve is sub-source mapping. Sorcery would work great for that if I were generating files, but I would like to keep everything as string sources. For now it will at least show me which mod an error came from, just not the sub-file within it. If anyone has info on how to do a sorcery-style operation on strings, let me know.
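(If I read sorcery's README correctly, sorcery.load() also accepts an options object with content and sourcemaps maps for files that only exist in memory, which might allow a string-only pipeline. An unverified sketch, where every name below is a placeholder for the in-memory strings and maps produced above:)

const sorcery = require("sorcery")

sorcery.load("compiled.js", {
  // hypothetical: keys are virtual file names, values are the in-memory strings/maps
  content: { "compiled.js": concatenatedCode, "mods/core": coreCode },
  sourcemaps: { "compiled.js": concatenatedMap, "mods/core": coreMap }
}).then(chain => {
  // apply() flattens the map chain so errors resolve back to the original sub-files
  const flatMap = chain.apply()
})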
Here's the relevant final code from my file:
const rollup = require("rollup")
const concat = require("source-map-concat")
const convert = require("convert-source-map")
const fs = require("fs")
const path = require("path")
const modsPath = path.join(__dirname, "mods")
const getNames = _ => JSON.parse(fs.readFileSync(path.join(modsPath, "loadList.json"), "utf8"))
const wrap = (node, mod) => {
node.prepend("\n// File: " + mod.source + "\n")
}
const rolls = {}
const bundles = {}
const rollupMod = (modName, after) => {
let dir = path.join(modsPath, modName),
file = path.join(dir, "index.js")
rollup.rollup({
entry: file,
external: "G",
plugins: []
}).then(bundle => {
rolls[modName] = bundle.generate({
format: "iife",
moduleName: modName,
exports: "none",
useStrict: false,
sourceMap: true
})
after()
})
}
const rollupMods = after => {
let names = getNames(), i = 0,
rollNext = _ => rollupMod(names[i++], _ => i < names.length ? rollNext() : after()) // i has already been incremented, so compare against the full length or the last mod gets skipped
rollNext()
}
const bundleCode = after => {
rollupMods(_ => {
let mods = concat(getNames().map(modName => {
let mod = rolls[modName]
return {
source: path.join(modsPath, modName),
code: mod.code,
map: mod.map
}
}), {
delimiter: "\n",
process: wrap
})
mods.prepend("(function(){\n")
mods.add("\n})();")
let result = mods.toStringWithSourceMap({
file: path.basename('.')
})
bundles.code = result.code + "\n" + convert.fromObject(result.map).toComment()
after(bundles.code)
})
}
exports.bundleCode = bundleCode
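For completeness, this is roughly how the exported function gets consumed (the file name build.js is just an assumption):

// elsewhere, e.g. in the script that injects the bundle:
const { bundleCode } = require("./build")

bundleCode(code => {
  // `code` is the wrapped bundle with an inline source map comment appended,
  // ready to be injected directly rather than written to a file
  console.log("bundled", code.length, "characters of mod code")
})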
I am parsing JSX (a React component) to find all nodes that:
belong to the JSXText type
have a non-empty text value
This is my first attempt at parsing the component source file, with a test of how to get Literal nodes.
const acorn = require("acorn")
const jsx = require("acorn-jsx")
const cf = require("acorn-class-fields")
const walk = require("acorn-walk")
// `cnt` holds the component source code as a string
let s = acorn.Parser.extend(cf).extend(jsx()).parse(cnt, {ecmaVersion: "latest", sourceType: "module"});
walk.simple(s, {
Literal(node) {
console.log(`Literal: ${node.value}`)
}
})
Question
I found the node types here, but JSX node types are not listed there. Where can they be found?
How can I filter nodes with a non-empty text value?
I believe you can extend acorn-walk to include the extra node types.
This plugin works for me: https://github.com/sderosiaux/acorn-jsx-walk
At the time of writing, this is how I would implement it:
const acorn = require("acorn");
const jsx = require("acorn-jsx");
const walk = require("acorn-walk");
const { extend } = require('acorn-jsx-walk')
// setup
const parser = acorn.Parser.extend(jsx());
extend(walk.base);
// parse
const ast = parser.parse(cnt, { ecmaVersion: "latest", sourceType: 'module' });
// analyse
walk.simple(ast, {
JSXText(node) {
console.log(node.value) // JSXText nodes carry their text in `value`, not `expression`
}
});
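To also cover the second part of the question (skipping whitespace-only text nodes), a small extension of the walker above, assuming the same ast:

const texts = [];
walk.simple(ast, {
  JSXText(node) {
    // JSXText nodes carry their raw text in `value`; keep only non-empty ones
    if (node.value.trim() !== "") {
      texts.push(node.value.trim());
    }
  }
});
console.log(texts);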
I'm building a tool that will clean up a JSON file containing localization strings if they are no longer in use in the source code.
First, I parse the localization file into an array of all the ids that are (or may no longer be) used in the source code to look up the string value in the right language.
So I have an array looking something like this:
const ids = ['home.title', 'home.description', 'menu.contact', 'menu.social'];
etc. you get the point.
I'm using Node's promisified fs.readFile and glob to search .js source files like this:
const jsFiles = await globbing('./**/*.js', {cwd: directory, ignore: './**/*test.js'});
const results = jsFiles.map(async file => {
const filePath = path.join(directory, file);
return readFile(filePath, 'utf8').then((data) => {
// handle match here
}).catch(console.log);
});
I also have Ramda available for fancy list/collection functions, but no other libraries.
So, I will be able to loop through the ids array and, for each item, scan the entire source code for a match with the function above. But scanning the entire source code ids.length times seems like overkill: the ids array holds around 400 ids and the source code is hundreds of large files.
To avoid O(M*N), is there a way to match the entire array against the entire source code and discard the unmatched array items? Or what would be the best practice here?
current solution:
const cleanLocal = async () => {
const localIdList = Object.keys(await getLocalMap());
const matches = [];
localIdList.map(async id => {
const directory = path.join(__dirname, '..');
const jsFiles = await globbing('./**/*.js', {cwd: directory, ignore: './**/*test.js'});
jsFiles.map(async file => {
const filePath = path.join(directory, file);
return readFile(filePath, 'utf8').then((data) => {
if (data.indexOf(id) >= 0) {
console.log(id);
matches.push(id);
}
}).catch(console.log);
});
});
};
You can't avoid the O(M*N) complexity in this case.
However, to improve performance you can switch the order of your operations: first loop over the files and then loop over the array. This is because looping over the files is a costly IO operation, while looping over the array is a fast memory operation.
In your code, you have M memory operations and M*N IO (filesystem) operations.
If you first loop over the files, you would have N IO operations and M*N memory operations.
As it is not possible to avoid O(M*N) in this case, I have only been able to optimize the search by looping over the source files once and then over the ids for each file, as proposed by @mihai.
The end result looks like this:
const cleanLocal = async () => {
const localIdList = Object.keys(await getLocalMap()); // ids' array
const matches = [];
const directory = path.join(__dirname, '..');
const jsFiles = await globbing('./**/*.js', {cwd: directory, ignore: './**/*test.js'}); // list of files to scan
const results = jsFiles.map(async file => {
const filePath = path.join(directory, file);
return readFile(filePath, 'utf8').then((data) => {
localIdList.map(id => {
if (R.contains(id, data)) { // R = ramda.js
matches.push(id);
}
});
}).catch(console.log);
});
await Promise.all(results);
console.log('matches: ' + R.uniq(matches).length);
console.log('in local.json: ' + localIdList.length);
};
Please let me know if there are any other ways to optimize this.
Goal: get a list of absolute paths for all files under a directory, recursively, using Node.js.
Info: As a Python dev, I normally use Python packages which handle this in a platform-independent fashion. My boss wanted some JavaScript code to do the same, and as a former JS dev I thought, "oh, this is easy, let's look up the Node way, since I never got the chance to get my hands dirty with it"... but I seem to be mistaken.
I don't see anything in Node relating to directory walking, or a way I could hack something together to do it.
I looked in "Child Process", "Console", "File System", "OS", "Path", and "Process". I didn't see anything that would do anything akin to:
pushd .
cd $dir
for folder in $(ls);
do
pushd .
cd $folder
# call again recursively
ls $(pwd)$flag >> $dir/files_matching.txt
popd
done;
# or any platform-independent means of recursively getting
# all files and their absolute paths which meet flag criteria,
# such as "*.txt" || "_*found*"
I could use child process to carry out Command Line items, but then I need to create a bunch of conditionals based on the OS consuming the app, and figured this would be something which already exists.
I don't want to reinvent the wheel, but figured this has already been done; I just don't see it in the base modules.
Is there a node module I would need which accomplishes this, which is outside of the base modules?
I am trying not to have to hand-roll a conditional, OS-based system just to get an exhaustive list of absolute paths for all files under a directory (or a subset filtered by extension, etc.).
I'd do it like this:
synchronous:
const fs = require("fs");
const { resolve } = require("path");
const getFiles = dir => {
const stack = [resolve(dir)];
const files = [];
while (stack.length) {
dir = stack.pop();
fs.readdirSync(dir).forEach(item => {
const path = resolve(dir, item);
(fs.statSync(path).isDirectory() ? stack : files).push(path);
});
}
return files;
};
console.log(getFiles("."));
asynchronous:
const fs = require("fs");
const { resolve } = require("path");
const pify = require("util").promisify;
const readdir = pify(fs.readdir);
const stat = pify(fs.stat);
const getFiles = async dir => {
const files = await readdir(resolve(dir));
const filesP = files.map(async file => {
const path = resolve(dir, file);
return (await stat(path)).isDirectory() ? getFiles(path) : path;
});
// return (await Promise.all(filesP)).flat(); // flat supported in node ~11
return [].concat(...(await Promise.all(filesP)));
};
getFiles(".").then(console.log);
async demo https://repl.it/#marzelin/getFiles
So, I was looking at the File System module and noticed the function readdir
https://nodejs.org/dist/latest-v8.x/docs/api/fs.html#fs_fs_readdir_path_options_callback
which does the trick in part. I guess it wasn't named the way I was looking for; I was searching for things involving LIST and DIR, but not READ.
Anyway, here is a way to read a directory:
var fs = require('fs');
if (process.argv.length <= 2) {
console.log("Usage: " + __filename + " path/to/directory");
process.exit(-1);
}
var path = process.argv[2];
fs.readdir(path, function(err, items) {
console.log(items);
for (var i=0; i<items.length; i++) {
console.log(items[i]);
}
});
Note that the one above is async, but there is a sync variant; just add "Sync" to the name. Now you need to determine whether something is a directory:
let file = fs.statSync("path/to/directory")
let isDir = file.isDirectory()
So you can couple this all together.
var fs = require('fs')
function recurse_file_system(path, contains) {
let files = fs.readdirSync(path);
let dArr = [];
let fArr = [];
for (let i in files){
let newPath = path + "/" + files[i]
if (fs.statSync(newPath).isDirectory()){
dArr.push(newPath)
}else{
if (filter(files[i], contains)){ // use the pattern passed in rather than a hard-coded ".txt"
fArr.push(newPath)
}
}
}
if (dArr.length == 0){ // no subdirectories left to descend into
return fArr;
}else{
for (let d in dArr){
let rslt = recurse_file_system(dArr[d], contains); // pass the pattern down the recursion
for (let i in rslt){
fArr.push(rslt[i])
}
}
return fArr;
}
}
console.log("Files:")
console.log(recurse_file_system("/", ".txt"))
Now if you want to extend this, all you need to do is add a filter to, say, limit the results based on particular criteria, such as a file-name pattern.
function filter(filename, contains){
let reg = new RegExp(contains)
return reg.test(filename)
}
You can add it in the base case, where you see filter above, or you can just return the whole set and filter it afterwards with Array.prototype.filter, as sketched below.
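For example, a quick sketch of that second option, gathering everything and then narrowing the list (an empty pattern matches every file):

// collect all files, then filter the flat list afterwards
const allFiles = recurse_file_system("/", "");
const textFiles = allFiles.filter(name => /\.txt$/.test(name));
console.log(textFiles);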
I know some of webpack's inner functionality: something about dependencies, templates, and module building. However, there are few comments in its source and no complete documentation site for now, so I can't chain it all together to solve my problem.
For my current requirement, I need to render a specific module with a custom source template (similar to webpack's MultiModule).
Note: To be clear, the generated module's dependency array is not static. For example, one time it may be ['./a', './b', './c'], another time it may be ['./b', './c', './d']. That depends on some dynamic config before the build.
As a more detailed example, I need a module called main.js. At build time, it needs to be generated dynamically with the target dependencies (since it is not known in advance which modules will be dependencies), like:
// main.js
var a = require('./a')
var b = require('./b')
var c = require('./c')
var d = require('./d')
...
In fact, if I only needed to require them all dynamically, I could just construct an entry point dynamically:
// webpack.config.js
{
entry: {
main: [
'./a',
'./b',
'./c',
...
]
},
}
and webpack will generate a module that may look like this:
__webpack_require__(1);
__webpack_require__(2);
__webpack_require__(3);
return __webpack_require__(4);
But I need to do something more:
var a = __webpack_require__(1);
var b = __webpack_require__(2);
var c = __webpack_require__(3);
var d = __webpack_require__(4);
...
// do something with a,b,c,d... under my custom need
...
return something or nothing;
As those of you who know webpack will appreciate, it is very complicated, and its plugin (event) hierarchy is hard to understand and track.
Need some expertise! :)
I'm sorry for my unclear question before.
However, there was a somewhat strange atmosphere around it. I set up a bounty for attention and guidance; someone's offhand answer drove me to comment with some impoliteness, and then a peacemaker showed up with comments unrelated to the question or answer. That was unpleasant.
Focusing on that just makes things worse and helps nothing, so I will let it go.
Whether it was a lack of attention or a lack of experts, I had to fight it myself. Fortunately, digging into webpack made some progress.
Prerequisite
Before webpack became popular, tools like grunt and gulp were the fashion for constructing a custom build flow (with their plugins). They can achieve most custom requirements, especially generating a custom module (something webpack has no obvious, direct way to do).
When you come to do something like automatically collecting custom dependencies, generating a custom module is the next essential step. This is commonly seen in product line/family design.
Solutions
#1
This is the simplest and most direct way, but it lacks flexibility.
The source method of MultiModule generates the entry module with its multiple dependencies. Overriding it is enough to hit the target.
// hack.js
const MultiModule = require('webpack/lib/MultiModule')
const RawSource = require('webpack/lib/RawSource') // needed by the overridden source() below
MultiModule.prototype.source = function(dependencyTemplates, outputOptions) {
var str = ['"hello world";\n'];
this.dependencies.forEach(function (dep, idx) {
if (dep.module) {
if (idx === this.dependencies.length - 1)
str.push("module.exports = ");
str.push("__webpack_require__(");
if (outputOptions.pathinfo)
str.push("/*! " + dep.request + " */");
str.push("" + JSON.stringify(dep.module.id));
str.push(")");
} else {
str.push("(function webpackMissingModule() { throw new Error(");
str.push(JSON.stringify("Cannot find module \"" + dep.request + "\""));
str.push("); }())");
}
str.push(";\n");
}, this);
return new RawSource(str.join(""));
}
Compared to the original, I add the string statement "hello world";\n at the top of the generated source; nothing else is changed.
// webpack.config.js (make sure ./hack is required before the build so the override above takes effect)
module.exports = {
entry: {
main: ["./a", "./b"],
}
// something else
}
The output main.js may look like this:
//...
/* 0 */
/*!******************!*\
!*** multi main ***!
\******************/
/***/ function(module, exports, __webpack_require__) {
"hello world";
__webpack_require__(/*! ./a */1);
module.exports = __webpack_require__(/*! ./b */2);
/***/ }
//...
Now we can do whatever we want in the source method, keeping compatibility in mind.
#2
This way is much more flexible but also complex.
It requires at least 5 files (the sources are too long, so I made them into snippets):
CustomMultiModule.js:
// CustomMultiModule.js
const MultiModule = require('webpack/lib/MultiModule')
const RawSource = require('webpack/lib/RawSource')
class CustomMultiModule extends MultiModule {
constructor(...args) {
super(...args)
}
source(dependencyTemplates, outputOptions) {
var str = ['"hello world";'];
this.dependencies.forEach(function(dep, idx) {
if (dep.module) {
if (idx === this.dependencies.length - 1)
str.push("module.exports = ");
str.push("__webpack_require__(");
if (outputOptions.pathinfo)
str.push("/*! " + dep.request + " */");
str.push("" + JSON.stringify(dep.module.id));
str.push(")");
} else {
str.push("(function webpackMissingModule() { throw new Error(");
str.push(JSON.stringify("Cannot find module \"" + dep.request + "\""));
str.push("); }())");
}
str.push(";\n");
}, this);
return new RawSource(str.join(""));
}
}
module.exports = CustomMultiModule
CustomMultiModuleFactory.js:
// CustomMultiModuleFactory.js
const MultiModuleFactory = require('webpack/lib/MultiModuleFactory')
const CustomMultiModule = require('./CustomMultiModule')
class CustomMultiModuleFactory extends MultiModuleFactory {
constructor() {
super()
}
create(context, dependency, callback) {
callback(null, new CustomMultiModule(context, dependency.dependencies, dependency.name));
};
}
module.exports = CustomMultiModuleFactory
CustomMultiEntryPlugin.js:
// CustomMultiEntryPlugin.js
const MultiEntryPlugin = require('webpack/lib/MultiEntryPlugin')
const MultiEntryDependency = require('webpack/lib/dependencies/MultiEntryDependency')
const CustomMultiModuleFactory = require('./CustomMultiModuleFactory')
class CustomMultiEntryPlugin extends MultiEntryPlugin {
constructor(context, entries, name) {
super(context, entries, name)
}
apply(compiler) {
compiler.plugin('after-plugins', function(compiler) {
compiler.plugin("compilation", function(compilation, params) {
var multiModuleFactory = new CustomMultiModuleFactory();
compilation.dependencyFactories.set(MultiEntryDependency, multiModuleFactory);
})
})
}
}
module.exports = CustomMultiEntryPlugin
CustomEntryOptionPlugin.js:
// CustomEntryOptionPlugin.js
const CustomMultiEntryPlugin = require('./CustomMultiEntryPlugin')
class CustomEntryOptionPlugin {
constructor() {}
apply(compiler) {
compiler.plugin("entry-option", function(context, entry) {
if (typeof entry === "object") {
Object.keys(entry).forEach(function(name) {
if (Array.isArray(entry[name])) {
compiler.apply(new CustomMultiEntryPlugin(context, entry[name], name));
}
});
}
});
}
}
module.exports = CustomEntryOptionPlugin
webpack.config.js:
// webpack.config.js
const CustomEntryOptionPlugin = require('./CustomEntryOptionPlugin')
module.exports = {
entry: {
main: ["./a", "/b"] // this dependencies array may be generated
...
},
output: {
path: path.join(__dirname, "js"),
pathinfo: true,
filename: "[name].[chunkhash].js",
chunkFilename: "[chunkhash].js"
},
plugins: [
new CustomEntryOptionPlugin(),
...
]
...
};
With the code above, we can achieve the same as #1. And we can gain more control over the target entry or other requirements, if we want.
Often in webpack you're only requiring one file, plus whatever libraries that file depends on. If you require main, webpack will resolve the dependencies based on the CommonJS syntax, which you can read about here. Does removing the extra entries in your webpack.config.js file solve this? E.g. having only the following as the config:
// webpack.config.js
{
entry: [ "./main" ],
...
}
It sounds like you don't really understand how webpack works. The idea is to emulate how Node's CommonJS syntax lets your JavaScript be modular and split across separate files, while also being performant and not requiring tons of AJAX requests from your browser. If you want to read more about Webpack's config file, check out this page.
As a side note, returning at the end of the module does absolutely nothing. If you want to export, you can use module.exports, but having a line like return true or something at the end of your main.js file doesn't get caught anywhere meaningful.
How do I require() / import modules from the console? For example, say I've installed the ImmutableJS npm package; I'd like to be able to use functions from the module while I'm working in the console.
Here's another more generic way of doing this.
Requiring a module by ID
The current version of WebPack exposes webpackJsonp(...), which can be used to require a module by ID:
function _requireById(id) {
return webpackJsonp([], null, [id]);
}
or in TypeScript
window['_requireById'] =
(id: number): any => window['webpackJsonp']([], null, [id]);
The ID is visible at the top of the module in the bundled file or in the footer of the original source file served via source maps.
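For reference, in a webpack 1.x/2.x bundle each module starts with a header like the following (illustrative excerpt); the number in the leading comment is the ID to pass to _requireById:

/* 42 */
/***/ function(module, exports, __webpack_require__) {
    // ... module source ...
/***/ }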
Requiring a module by name
Requiring a module by name is much trickier, as WebPack doesn't appear to keep any reference to the module path once it has processed all the sources. But the following code seems to do the trick in a lot of cases:
/**
* Returns a promise that resolves to the result of a case-sensitive search
* for a module or one of its exports. `makeGlobal` can be set to true
* or to the name of the window property it should be saved as.
* Example usage:
* _requireByName('jQuery', '$');
* _requireByName('Observable', true);
*/
window['_requireByName'] =
(name: string, makeGlobal?: (string|boolean)): Promise<any> =>
getAllModules()
.then((modules) => {
let returnMember;
let module = _.find<any, any>(modules, (module) => {
if (_.isObject(module.exports) && name in module.exports) {
returnMember = true;
return true;
} else if (_.isFunction(module.exports) &&
module.exports.name === name) {
return true;
}
});
if (module) {
module = returnMember ? module.exports[name] : module.exports;
if (makeGlobal) {
const moduleName = makeGlobal === true ? name : makeGlobal as string;
window[moduleName] = module;
console.log(`Module or module export saved as 'window.${moduleName}':`,
module);
} else {
console.log(`Module or module export '${name}' found:`, module);
}
return module;
}
console.warn(`Module or module export '${name}' could not be found`);
return null;
});
// Returns promise that resolves to all installed modules
function getAllModules() {
return new Promise((resolve) => {
const id = _.uniqueId('fakeModule_');
window['webpackJsonp'](
[],
{[id]: function(module, exports, __webpack_require__) {
resolve(__webpack_require__.c);
}},
[id]
);
});
}
This is quick first shot at this, so it's all up for improvement!
Including this in a module will allow require([modules], function) to be used from a browser
window['require'] = function(modules, callback) {
var modulesToRequire = modules.map(function(module) { // map (not forEach) so the results are collected
switch(module) {
case 'immutable': return require('immutable');
case 'jquery': return require('jquery');
}
})
callback.apply(this, modulesToRequire);
}
Example Usage:
require(['jquery', 'immutable'], function($, immutable) {
// immutable and $ are defined here
});
Note: each switch-statement option should either be something this module already requires, or be provided by ProvidePlugin (see the sketch below).
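For the ProvidePlugin route, the usual configuration looks roughly like this (a sketch; the chosen identifiers are just examples):

// webpack.config.js
const webpack = require("webpack");

module.exports = {
  // ...
  plugins: [
    // whenever the free variables $ or Immutable appear in a module,
    // webpack automatically requires 'jquery' / 'immutable' for it
    new webpack.ProvidePlugin({
      $: "jquery",
      Immutable: "immutable"
    })
  ]
};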
Sources:
Based on this answer, which can be used to add an entire folder.
Alternative method from Webpack Docs - which allows something like require.yourModule.function()
I found a way that works, for both WebPack 1 and 2. (as long as the source is non-minified)
Repo: https://github.com/Venryx/webpack-runtime-require
Install
npm install --save webpack-runtime-require
Usage
First, require the module at least once.
import "webpack-runtime-require";
It will then add a Require() function to the window object, for use in the console, or anywhere in your code.
Then just use it, like so:
let React = Require("react");
console.log("Retrieved React.Component: " + React.Component);
It's not very pretty (it uses regexes to search the module wrapper functions) or fast (takes ~50ms the first call, and ~0ms after), but both of these are perfectly fine if it's just for hack-testing in the console.
Technique
The below is a trimmed version of the source to show how it works. (see the repo for the full/latest)
var WebpackData;
webpackJsonp([],
{123456: function(module, exports, __webpack_require__) {
WebpackData = __webpack_require__;
}},
[123456]
);
var allModulesText;
var moduleIDs = {};
function GetIDForModule(name) {
if (allModulesText == null) {
let moduleWrapperFuncs = Object.keys(WebpackData.m).map(moduleID=>WebpackData.m[moduleID]);
allModulesText = moduleWrapperFuncs.map(a=>a.toString()).join("\n\n\n");
// these are examples of before and after webpack's transformation: (which the regex below finds the var-name of)
// require("react-redux-firebase") => var _reactReduxFirebase = __webpack_require__(100);
// require("./Source/MyComponent") => var _MyComponent = __webpack_require__(200);
let regex = /var ([a-zA-Z_]+) = __webpack_require__\(([0-9]+)\)/g;
let matches = [];
let match;
while (match = regex.exec(allModulesText))
matches.push(match);
for (let [_, varName, id] of matches) {
// these are examples of before and after the below regex's transformation:
// _reactReduxFirebase => react-redux-firebase
// _MyComponent => my-component
// _MyComponent_New => my-component-new
// _JSONHelper => json-helper
let moduleName = varName
.replace(/^_/g, "") // remove starting "_"
.replace(new RegExp( // convert chars where:
"([^_])" // is preceded by a non-underscore char
+ "[A-Z]" // is a capital-letter
+ "([^A-Z_])", // is followed by a non-capital-letter, non-underscore char
"g"),
str=>str[0] + "-" + str[1] + str[2] // to: "-" + char
)
.replace(/_/g, "-") // convert all "_" to "-"
.toLowerCase(); // convert all letters to lowercase
moduleIDs[moduleName] = parseInt(id);
}
}
return moduleIDs[name];
}
function Require(name) {
let id = GetIDForModule(name);
return WebpackData.c[id].exports;
}
Being able to require modules in the console is handy for debugging and code analysis. @psimyn's answer is very specific, so you aren't likely to maintain that function with all the modules you might need.
When I need one of my own modules for this purpose, I assign a window property to it so I can get at it, e.g. window.mymodule = whatever_im_exporting;. I use the same trick to expose a system module if I want to play with it, e.g.:
myservice.js:
let $ = require('jquery');
let myService = {};
// local functions service props etc...
module.exports = myService;
// todo: remove these window prop assignments when done playing in console
window.$ = $;
window.myService = myService;
It is still a bit of a pain, but digging into the bundles, I can't see any way to conveniently map over modules.
The answer from @Rene Hamburger is good but unfortunately doesn't work anymore (at least with my webpack version), so I updated it:
function getWebpackInternals() {
return new Promise((resolve) => {
const id = 'fakeId' + Math.random();
window['webpackJsonp'].push(["web", {
[id]: function(module, __webpack_exports__, __webpack_require__) {
resolve([module, __webpack_exports__, __webpack_require__])
}
},[[id]]]);
});
}
function getModuleByExportName(moduleName) {
return getWebpackInternals().then(([_, __webpack_exports__, __webpack_require__]) => {
const modules = __webpack_require__.c;
const moduleFound = Object.values(modules).find(module => {
if (module && module.exports && module.exports[moduleName]) return true;
});
if (!moduleFound) {
console.log('couldnt find module ' + moduleName);
return;
}
return moduleFound.exports[moduleName];
})
}
getModuleByExportName('ExportedClassOfModule');
expose-loader is, in my opinion, a more elegant solution:
require("expose-loader?libraryName!./file.js");
// Exposes the exports for file.js to the global context on property "libraryName".
// In web browsers, window.libraryName is then available.
Adding the below code to one of your modules will allow you to load modules by id.
window.require = __webpack_require__;
In the console use the following:
require(34)
You could do something similar to what psimyn advised by adding the following code to some module in the bundle:
require.ensure([], function () {
window.require = function (module) {
return require(module);
};
});
Use require from console:
require("./app").doSomething();
See more
After making an npm module for this (see my other answer), I did a search on npms.io and seem to have found an existing webpack-plugin available for this purpose.
Repo: https://www.npmjs.com/package/webpack-expose-require-plugin
Install
npm install --save webpack-expose-require-plugin
Usage
Add the plugin to your webpack config, then use at runtime like so:
let MyComponent = require.main("./path/to/MyComponent");
console.log("Retrieved MyComponent: " + MyComponent);
See package/repo readme page for more info.
EDIT
I tried the plugin out in my own project, but couldn't get it to work; I kept getting the error: Cannot read property 'resource' of undefined. I'll leave it here in case it works for other people, though. (I'm currently using the solution mentioned above instead)
After both making my own npm package for this (see here), as well as finding an existing one (see here), I also found a way to do it in one-line just using the built-in webpack functions.
It uses WebPack "contexts": https://webpack.github.io/docs/context.html
Just add the following line to a file directly in your "Source" folder:
window.Require = require.context("./", true, /\.js$/);
Now you can use it (eg. in the console) like so:
let MyComponent = Require("./Path/To/MyComponent");
console.log("Retrieved MyComponent: " + MyComponent);
However, one important drawback of this approach, as compared to the two solutions mentioned above, is that it seems not to work for files in the node_modules folder. When the path is adjusted to "../", webpack fails to compile, at least in my project (perhaps because the node_modules folder is just so massive).