Is there a way I can get Babel to output the AST of a file, as a JSON or similar, rather than condense it back into JS?
The reason is that I want to be able to do some simple static analysis / code gen, and while I aim to eventually do it within a plugin for Babel (or similar), I feel it would simplify things significantly if I can start with a static model.
There's babylon, Babel's own parser:
npm install -g babylon
babylon your_file.js > ast.json
Node API example and source:
https://github.com/babel/babel/tree/master/packages/babylon
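If you want the AST as JSON from Node rather than the CLI, a minimal sketch along these lines should work (the parse options are my assumption; see the linked repo for the full list):
const fs = require('fs');
const babylon = require('babylon');

const code = fs.readFileSync('your_file.js', 'utf8');
const ast = babylon.parse(code, { sourceType: 'module' }); // options are an assumption
fs.writeFileSync('ast.json', JSON.stringify(ast, null, 2));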
Also the babel plugin handbook might come in handy for AST reference, and to get started with plugin development.
You should check out ast-source; it can use Babel as its parser when it builds the tree.
Example from their npmjs page:
import ASTSource from "ast-source"
import estraverse from "estraverse"
import fs from "fs"
function transform(AST) {
    var replaced = {
        "type": "Literal",
        "value": 42,
        "raw": "42"
    };
    return estraverse.replace(AST, {
        enter: function (node) {
            if (node.type === estraverse.Syntax.Literal) {
                return replaced;
            }
        }
    });
}
var source = new ASTSource(fs.readFileSync("./input.js", "utf-8"), {
    filePath: "./input.js"
});
var output = source.transform(transform).output();
console.log(output.code); // => "var a = 42;"
console.dir(output.map.toString()); // => source map
fs.writeFileSync("./output.js", output.codeWithMap, "utf-8");
In my project, I need to do cache busting, since after a new deploy, the browser often only reloads the HTML but not the JS & CSS files.
Currently, I am not building the HTML in any way, it just already sits in the public directory.
The simplest approach seems to be to add a timestamp to the JS reference:
<script type="module" src="bundle/index.js?ts=20201026-102300"></script>
Now, what is the best way to achieve this in a project that already uses rollup.js?
I have seen @rollup/plugin-html, yet I'm puzzled by the example in its documentation, as it takes a JS file as input:
input: 'src/index.js',
What JS file should that be?
Instead I expect that I need to define:
an input HTML file
some space for code to set the timestamp variable
an output HTML file
So what's the best way to do this, be it with @rollup/plugin-html or with another approach?
Came here looking for an answer to this question myself; a few moments and a bit of regex fiddling later, I got it to work.
Note: this solution edits your HTML file each time you build it. There is no input (template) HTML and output HTML.
Install rollup-plugin-replace-html-vars
npm install rollup-plugin-replace-html-vars --save-dev
Add this piece of config to your rollup.config.js file
// rollup.config.js
import replaceHtmlVars from 'rollup-plugin-replace-html-vars'; // default export name assumed
// ...
plugins: [
    replaceHtmlVars({
        files: '**/index.html',
        from: /\.\/\w+\/\w+\.\w+.\w+\?v=\d+/g,
        to: './dist/app.min.js?v=' + Date.now(),
    }),
]
In your index.html, add this reference to the app.js:
<script type="module" src="./dist/app.min.js?v=1630086943272"></script>
Run rollup and the reference to app.js in your index.html will have a timestamp of the build time each time you run it.
Bonus:
If you don't have a .min in your filename, use this regex instead:
/\.\/\w+\/\w+\.\w+\?v=\d+/g
Full disclosure: I'm no regex wizard, I just managed to hack this one together. I bet someone here has a better way of capturing ./dist/app.min.js?v=1630086943272 with a regex, but this works for my solution.
I went with using file hashes, which means it's only reloaded when there is a new version for that file.
For that, I wrote my own utility:
import fs from 'fs'; // assumed: Node built-ins imported at the top of rollup.config.js
import path from 'path';
import crypto from 'crypto';

function escapeStringRegexp(string) {
    if (typeof string !== 'string') {
        throw new TypeError('Expected a string');
    }
    return string
        .replace(/[|\\{}()[\]^$+*?.]/g, '\\$&')
        .replace(/-/g, '\\x2d');
}

function insertHashToFile(options) {
    return {
        writeBundle(outputOptions) {
            const outputDir = outputOptions.dir ? outputOptions.dir : path.dirname(outputOptions.file);
            let indexHtml = fs.readFileSync(options.htmlFile, 'utf8');
            for (const sourceFile of options.sourceFiles) {
                const fb = fs.readFileSync(path.join(outputDir, sourceFile));
                const hash = crypto.createHash('sha1');
                hash.update(fb);
                const hexHash = hash.digest('hex');
                const replacePattern = new RegExp(escapeStringRegexp(sourceFile) + '(?:\\?h=[^"]+)?', 'g');
                indexHtml = indexHtml.replaceAll(replacePattern, `${sourceFile}?h=${hexHash.substring(0, 8)}`);
            }
            fs.writeFileSync(options.htmlFile, indexHtml);
        },
    };
}
and then
plugins: [
    production && insertHashToFile({
        sourceFiles: [
            "bundle.js",
            "bundle.css",
        ],
        htmlFile: "public/index.html",
    }),
]
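With this in place, the references in public/index.html end up carrying a short content hash instead of a timestamp, something like the following (hash values illustrative):
<script type="module" src="bundle.js?h=1a2b3c4d"></script>
<link rel="stylesheet" href="bundle.css?h=9f8e7d6c">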
I have (polymorphic) code that is bundled using webpack, including dynamic imports (for code splitting), and I want to run the same code in NodeJS, currently using babel-register to be able to run the ES6 code.
I've encountered an issue when it comes to the dynamic imports: The resolved/loaded module seems to be wrapped in an additional (default-)module.
The following is a minimal example, using NodeJS v10.14.1, @babel/core@7.2.2, @babel/plugin-syntax-dynamic-import@7.2.0, @babel/preset-env@7.3.1, @babel/register@7.0.0:
index.js:
require('@babel/register')({
    presets: ['@babel/preset-env'],
    plugins: [
        '@babel/plugin-syntax-dynamic-import'
    ]
})
require('./main').run()
dep.js:
export const bar = 3;
main.js:
import * as syncMod from './dep'
export function run() {
console.log(syncMod)
import('./dep').then(mod => console.log(mod))
}
Running this via node --experimental-modules index.js yields:
{ bar: 3 } // the syncMod
[Module] { default: { bar: 3 } } // the dynamically loaded
The "normal" import works as expected and directly gives the object with the exports. The dynamic-import does this too (as I would expect) in the bundled browser-version, but returns this [Module] { default: .. } thing around it in NodeJS.
Running without the babel-register code and rather using babel-node (@babel/node@7.2.2) (npx babel-node --experimental-modules --plugins=@babel/plugin-syntax-dynamic-import --presets=@babel/preset-env index) yields the same result.
I need to run it with the experimental flag; otherwise the dynamic import does not work at all (a "Not supported" error is thrown).
I could access the dynamically-loaded module without further problems by accessing it below the added layer like mod.default.bar, but then my code would differ for browser and NodeJS.
I'm grateful for any explanation for this behaviour and maybe a solution (/where I went wrong) to get the normal/expected exports object.
I'm currently using this helper as a workaround:
function lazyImport(promise) {
if (typeof window !== 'undefined') {
// Browser
return promise
}
// Node
return promise.then(mod => mod.default)
}
and wrap every import() like this:
lazyImport(import('./path/to/mod')).then(mod => { /* use the mod */ })
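For example, with the dep.js from the question above, both environments then see the same shape (hypothetical usage):
lazyImport(import('./dep')).then(mod => {
    console.log(mod.bar); // 3 in both Node and the browser
})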
The docs don't have any examples of using this on its own but they do say this:
Unless you're using the Gulp or Webpack plugins, you'll need to specify code via flags. Both jsCode and externs accept an array containing objects in the form {src, path, sourceMap}. Using path, you can construct a virtual filesystem for use with ES6 or CommonJS imports—although for CommonJS, be sure to set processCommonJsModules: true.
I've created a "compile.js" file based on the docs:
const compile = require('google-closure-compiler-js').compile;
const flags = {
jsCode: [{path: './server/server.js'}],
processCommonJsModules: true
};
const out = compile(flags);
console.info(out.compiledCode);
In my "./server/server.js" file, I put a console.log but it doesn't output. Not sure where to go from here...
Borrowing from icidasset/quotes.
It appears to me that path is not intended to be used the way you are using it.
Quote:
Using path, you can construct a virtual filesystem for use with ES6 or CommonJS imports—although for CommonJS, be sure to set processCommonJsModules: true.
So instead you must expand your own sources, something webpack and gulp must be doing for you when you go that route.
const fs = require('fs');
const compile = require('google-closure-compiler-js').compile;

const files = ['./server/server.js'];

const outputs = files.map(f => {
    // Expand each source file yourself and pass its contents as src
    const out = compile({
        jsCode: [{ src: fs.readFileSync(f, 'utf8'), path: f }],
        assumeFunctionWrapper: true,
        languageIn: 'ECMASCRIPT5'
    });
    return out;
});
I have a project that is using babel-register to dynamically transpile ES6 source to ES5 when requiring that module in a Node 6.6 project. I've read that babel-register hooks into Node's require function in order to transpile a file when you try to load it, but I'm not always clear on which files will be affected by that change.
This question comes up for me a lot when I'm writing tests: is only my production code getting transpiled, or does the test code get transpiled too? This brings me to the more general question, which is the topic of this post:
How can I tell when Babel is actually running, and which files are being transpiled?
Example code
Let's say I have production classes like this that are written in ES6 syntax
//src/greeter.js
export default class Greeter {
sayHello() {
return 'Hello World';
}
}
and Babel is configured to transpile like so (.babelrc)
{
"presets": ["es2015"]
}
and then there's some test code
//features/step_definitions/greeter_steps.js
import Greeter from '../../src/greeter'; //Causes greeter.js to be transpiled
import expect from 'expect';
var stepWrapper = function() {
//Does Babel try to transpile this code too?
this.Given(/^a greeter$/, function() {
this.greeter = new Greeter();
});
this.When(/^I ask it for a general greeting$/, function() {
this.greeting = this.greeter.sayHello();
});
this.Then(/^it should greet the entire world$/, function() {
expect(this.greeting).toEqual('Hello World');
});
};
module.exports = stepWrapper;
and all of that runs on Node like so
cucumberjs --compiler js:babel-core/register
Example code is available here, if that is helpful.
I made a hack to node_modules/babel-register/lib/node.js to do some logging like so
function compile(filename) {
var result = void 0;
var opts = new _babelCore.OptionManager().init((0, _extend2.default)({ sourceRoot: _path2.default.dirname(filename) }, (0, _cloneDeep2.default)(transformOpts), { filename: filename }));
var cacheKey = (0, _stringify2.default)(opts) + ":" + babel.version;
var env = process.env.BABEL_ENV || process.env.NODE_ENV;
console.log('[babel-register::compile] filename=' + filename + '\n'); //Added logging here
if (env) cacheKey += ":" + env;
if (cache) {
var cached = cache[cacheKey];
if (cached && cached.mtime === mtime(filename)) {
result = cached;
}
}
...
}
which then reports that test and production code are at least passing through Babel on some level
$ npm t
> cucumber-js-babel@1.0.0 test /Users/krull/git/sandbox/node/cucumber-js-babel
> cucumberjs --compiler js:babel-core/register
[babel-register::compile] filename=.../node/cucumber-js-babel/features/step_definitions/greeter_steps.js
[babel-register::compile] filename=.../node/cucumber-js-babel/src/greeter.js
...test results...
However, I'm hoping for a better solution that
works by some means of plugins and/or configuration, instead of monkey patching
better distinguishes which files are actually being transpiled, and which ones pass through Babel without modification
Because of this:
cucumberjs --compiler js:babel-core/register
...Babel is invoked for both your test code and your regular source code. Keep in mind that in Node, the only way to import JS is through require, so babel-register will always be invoked. Of course, what Babel does depends on its configuration, but most likely you have a simple configuration where every file loaded via require, except those under node_modules, will be transpiled.
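If you want visibility without monkey patching, one option (a sketch of mine, not from this answer; the pre hook and file.opts.filename are my assumptions about the Babel 6 plugin API) is a do-nothing plugin that just logs each file Babel compiles:
// log-transpiled.js
module.exports = function () {
    return {
        pre(file) {
            // Runs once per file that babel-register actually compiles
            console.log('[babel] transpiling ' + file.opts.filename);
        },
        visitor: {}
    };
};
Register it in .babelrc alongside the preset, e.g. { "presets": ["es2015"], "plugins": ["./log-transpiled.js"] }.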
Is it possible to "require" an entire folder using RequireJS?
For example, I have a behaviors folder with a ton of behavior js files. I'd really like to be able to simply use require(['behaviors/*'], function() {...}); to load everything in that folder rather than having to keep that list up to date. Once compressed and optimized I'd have all those files lumped together, but for development it's easier to work with them individually.
JavaScript in the browser has no filesystem access, so it can't scan a directory for files. If you are building your app with a scripting language like PHP or Ruby, you could write a script that scans the directory and adds the file names to the require() call.
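The same idea works with a small Node script at build time (a sketch, not part of this answer; the behaviors path and the all.js output name are assumptions):
// generate-behaviors.js -- run with Node before serving; writes an AMD module
// that depends on every .js file found in ./behaviors
const fs = require('fs');
const path = require('path');

const names = fs.readdirSync('./behaviors')
    .filter(function (f) { return f.endsWith('.js'); })
    .map(function (f) { return 'behaviors/' + path.basename(f, '.js'); });

fs.writeFileSync(
    './behaviors/all.js',
    'define(' + JSON.stringify(names) + ', function () { return arguments; });\n'
);
Your app can then require(['behaviors/all'], ...) and regenerate the file whenever the folder changes.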
I don't know if I can recommend this approach anymore. I think the more explicit way to do this is by manually "requiring"/"exporting" the functionality you need. The exception, I think, is if you have a "namespace" of files that you want exported; see "Babel and ES6 Module Import Declarations (plugin-export-namespace-from)" below.
These solutions also assume that you have a meaningful file structure, where file names become part of the "require" definition.
However, if you still need to do this, there are a few existing tools and methods that might provide the behavior that you're looking for.
Possible Solutions
Babel and ES6 Module Import Declarations (plugin-export-namespace-from)
Have a setup that is ES6 compliant.
You need to update your .babelrc file to include babel-plugin-proposal-export-namespace-from.
Use export namespace plugin by writing syntax like the following:
common/index.js
export * from './a'; // export const a = false;
export * from './b'; // export const b = true;
main.js
import { a, b } from './common';
console.log(a); // false
console.log(b); // true
Babel and ES6 Module Import Declarations (plugin-wildcard)
Have a setup that is ES6 compliant.
You need to update your .babelrc file to include babel-plugin-wildcard.
Use wildcard namespace plugin by writing syntax like the following:
main.js
import { a, b } from './common/*'; // imports './common/a.js' and './common/b.js'
console.log(a); // false
console.log(b); // true
RequireJS (Now Outdated)
Download and install require-wild: npm install require-wild
Configure the declaration as follows
grunt.initConfig({
    requireWild: {
        app: {
            // Input files to look for wildcards (require|define)
            src: ["./**/*.js"],
            // Output file contains generated namespace modules
            dest: "./namespaces.js",
            // Load your require config file used to find baseUrl - optional
            options: { requireConfigFile: "./main.js" }
        }
    }
});
grunt.loadNpmTasks("require-wild");
grunt.registerTask('default', ['requireWild']);
Then run the grunt task. Your file will be generated. Modify your setup to load namespaces.js
require(['namespaces'], function () { ... });
This now allows modules under src to use glob pattern matching for dependencies.
require(['behaviors/**/*'], function (behaviors) { });
I know this is old, but I'd like to share my solution:
For this solution you need jQuery.
1) Create a bash script that will list all the js files in
"MyDirectory/", and save it to "directoryContents.txt":
#!/bin/bash
# Find all the files in that directory...
for file in $( find MyDirectory/ -type f -name "*.js" )
do
    fileClean=${file%.js} # Must remove .js from the end!
    echo -n "$fileClean " >> MyDirectory/directoryContents.txt
done
File will look like this:
MyDirectory/FirstJavascriptFile MyDirectory/SecondJavascriptFile
MyDirectory/ThirdJavascriptFile
One problem with my script: it puts an extra " " at the end, which messes things up. Make sure to remove the excess space at the end of directoryContents.txt.
2) Then in your Client side JS code:
do a "GET" request to retrieve the text file
For each entry (split by the space), 'require' that file:
$.get( "MyDirectory/directoryContents.txt", {}, function( data ) {
var allJsFilesInFolder = data.split(" ");
for(var a=0; a<allJsFilesInFolder.length; a++)
{
require([allJsFilesInFolder[a]], function(jsConfig)
{
//Done loading this one file
});
}
}, "text");
I was having a problem with this code not finishing before my other code, so here's my extended answer:
define([''], function() {
    return {
        createTestMenu: function()
        {
            this.loadAllJSFiles(function(){
                //Here ALL those files you need are loaded!
            });
        },
        loadAllJSFiles: function(callback)
        {
            $.get( "MyDirectory/directoryContents.txt", {}, function( data ) {
                var allJsFilesInFolder = data.split(" ");
                var currentFileNum = 0;
                for (var a = 0; a < allJsFilesInFolder.length; a++)
                {
                    require([allJsFilesInFolder[a]], function(jsConfig)
                    {
                        currentFileNum++;
                        //If it's the last file that needs to be loaded, run the callback.
                        if (currentFileNum == allJsFilesInFolder.length)
                        {
                            console.log("Done loading all configuration files.");
                            if (typeof callback != "undefined") { callback(); }
                        }
                    });
                }
            }, "text");
        }
    }
});
What I ended up doing was: every time my Node server boots, it runs the bash script, populating directoryContents.txt. Then my client side just reads directoryContents.txt for the list of files and requires each one in that list.
Hope this helps!
There isn't really a way to do this conceptually on the fly (that I know of).
There are a few workarounds though:
Use grunt and concat, and then just require that behemoth... I know, kinda sucky.
What I think is a better solution... use a require hierarchy like so:
require(['js/controllers/init'], function(ctrls){
    ctrls(app, globals);
});

// /js/controllers/init.js
define(['js/controllers/index', 'js/controllers/posts'], function(index, posts){
    return function protagonist(app, globals){
        var indexModule = index(app, globals);
        var postsModule = posts(app, globals);
        return app || someModule;
    };
});
// /js/controllers/index.js
define(['js/controllers/posts'], function(posts){
    return function protagonist(app, globals){
        function method1(){}
        function method2(){}
        return {
            m1: method1,
            m2: method2
        };
    };
});
Note that "protagonist" function. That allows you to initialize modules before their use, so now you can pass in a 'sandbox' -- in this case app and globals.
Realistically, you wouldn't have /js/controllers/index.js... It should probably be something like /js/controllers/index/main.js or /js/controllers/index/init.js so that there is a directory adjacent to (sibling of) /js/controllers/init.js called "index". This will make your modules scalable to a given interface -- you can simply swap modules out and keep your interface the same.
Hope this helps! Happy coding!
I wrote a library to solve this problem. Eventually someone else came along and improved my library; here it is:
https://github.com/smartprocure/directory-metagen
You can use my lib with Gulp or whatever - it generates metadata for your project and RequireJS can use that metadata to require the desired files from the filesystem.
Using this lib will produce a RequireJS module that looks something like this:
define(
[
"text!app/templates/dashboardTemplate.ejs",
"text!app/templates/fluxCartTemplate.ejs",
"text!app/templates/footerTemplate.ejs",
"text!app/templates/getAllTemplate.ejs",
"text!app/templates/headerTemplate.ejs",
"text!app/templates/homeTemplate.ejs",
"text!app/templates/indexTemplate.ejs",
"text!app/templates/jobsTemplate.ejs",
"text!app/templates/loginTemplate.ejs",
"text!app/templates/overviewTemplate.ejs",
"text!app/templates/pictureTemplate.ejs",
"text!app/templates/portalTemplate.ejs",
"text!app/templates/registeredUsersTemplate.ejs",
"text!app/templates/userProfileTemplate.ejs"
],
function(){
return {
"templates/dashboardTemplate.ejs": arguments[0],
"templates/fluxCartTemplate.ejs": arguments[1],
"templates/footerTemplate.ejs": arguments[2],
"templates/getAllTemplate.ejs": arguments[3],
"templates/headerTemplate.ejs": arguments[4],
"templates/homeTemplate.ejs": arguments[5],
"templates/indexTemplate.ejs": arguments[6],
"templates/jobsTemplate.ejs": arguments[7],
"templates/loginTemplate.ejs": arguments[8],
"templates/overviewTemplate.ejs": arguments[9],
"templates/pictureTemplate.ejs": arguments[10],
"templates/portalTemplate.ejs": arguments[11],
"templates/registeredUsersTemplate.ejs": arguments[12],
"templates/userProfileTemplate.ejs": arguments[13]
}
});
You can then require modules in your front-end like so:
var footerView = require("app/js/jsx/standardViews/footerView");
However, as you can see, this is too verbose, so the magic way is like so:
Name the dependency above as allViews!
now you can do:
var allViews = require('allViews');
var footerView = allViews['standardViews/footerView'];
There are two advantages to requiring directories whole:
(1) in production, with the r.js optimizer, you can point to one dependency (module A) and it can then easily trace all of A's dependencies that represent an entire directory
(2) in development, you can require whole directories up front and then use synchronous syntax to require dependencies because you know they have already been loaded
enjoy "RequireJS-Metagen"
https://github.com/smartprocure/directory-metagen
https://www.npmjs.com/package/requirejs-metagen
https://github.com/ORESoftware/requirejs-metagen