"Reloading" a file in Node.JS - javascript

In Node.JS, I'm trying to "reload" a file. I have the following code:
delete require.cache[require.resolve("./pathToFile/" + restartModule)]
where restartModule is the file name, but I'm not sure how to add the file back using require() and assign it to the variable restartModule. For example, if restartModule is myModule, how would I load myModule.js back into the var called myModule? Or maybe there's an easier way to simply "reload" a file in the cache?

You could do something simple enough like this:
function reloadModule(moduleName) {
    delete require.cache[require.resolve(moduleName)];
    console.log('reloadModule: Reloading ' + moduleName + '...');
    return require(moduleName);
}
var restartModule = reloadModule('./restartModule.js');
You would have to call reloadModule every time you want to reload the source though. You could simplify by wrapping like:
var getRestartModule = function() {
    return reloadModule('./restartModule.js');
};
getRestartModule().doStuff();
Or
var reloadModules = function() {
    return {
        restartModule: reloadModule('./restartModule.js')
    };
};
var modules = reloadModules();
modules.restartModule.doStuff();
Or:
var reloadModules = function(moduleList) {
    var result = {};
    moduleList.forEach((module) => {
        result[module.name] = reloadModule(module.path);
    });
    return result;
};
var modules = reloadModules([{name: 'restartModule', path: './restartModule.js'}]);
modules.restartModule.doStuff();
You could even put the module reload on a setInterval so modules would get loaded every N seconds.
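For instance, a minimal sketch of that idea, reusing reloadModule from above (the 10-second interval and the module path are just placeholders):
var restartModule = reloadModule('./restartModule.js');
// swap in a fresh copy of the module every 10 seconds
setInterval(function () {
    restartModule = reloadModule('./restartModule.js');
}, 10000);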
Then there's always nodemon: https://nodemon.io/. It's useful in development: whenever a source file changes, it restarts your server.
You just use it like node, e.g.
nodemon server.js

Related

Is it possible to launch js files with different permissions using nodejs/npm?

I want to launch a js file from within another js file, but with different permissions. Like this:
main.js (which gets started)
config = JSON.parse(require("./config.json")) // <- should be possible
console.log(config.authkey) // <- should be possible
require("./randomJSFile.js").run()
randomJSFile.js (which will be executed by main.js)
exports.run = () => {
    let config = JSON.parse(require("./config.json")) // <--- this should not be possible, only the main.js file should have access to the config.json file
    console.log(config.authkey) // should not be possible
}
Does anyone know how to do something like that?
Based on a snippet from this question here you could possibly override the require function to check for the filename, something like this:
const Module = require('module');
const originalRequire = Module.prototype.require;
Module.prototype.require = function() {
    if (!arguments.length || typeof arguments[0] !== 'string') return {};
    if (arguments[0].includes('config.json')) return {};
    return originalRequire.apply(this, arguments);
};
Then perform this override after you've already required the config in your main file, so you don't accidentally block yourself.
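For instance, a minimal sketch of main.js with that ordering (file names taken from the question; note that require() already parses .json files, so the extra JSON.parse isn't needed):
// main.js: read the config first, then install the override, then load the untrusted code
const config = require("./config.json");
console.log(config.authkey);

const Module = require('module');
const originalRequire = Module.prototype.require;
Module.prototype.require = function() {
    if (!arguments.length || typeof arguments[0] !== 'string') return {};
    if (arguments[0].includes('config.json')) return {}; // from here on, config.json resolves to an empty object
    return originalRequire.apply(this, arguments);
};

require("./randomJSFile.js").run(); // can no longer read config.json via require()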

obfuscate js using cli

I use the javascript-obfuscator package. It works fine if I do operations with files, like:
javascript-obfuscator source.js
but I need to dynamically change the js source content and get the output on the console, like:
javascript-obfuscator "var foo = 'bar'; alert(foo);"
Any suggestion how I can avoid saving the content to a file and do it like in the snippet above?
Setting aside why you'd need to do this, it looks like you'll need to use the programmatic API described in the library's README.
Let's call this obfuscate.js:
var JavaScriptObfuscator = require('javascript-obfuscator');
var obfuscationResult = JavaScriptObfuscator.obfuscate(
    process.argv[2],
    {
        compact: false,
        controlFlowFlattening: true
    }
);
console.log(obfuscationResult.getObfuscatedCode());
$ node obfuscate.js 'console.log(1)'
will then output (for example)
var _0x2b5a = ['log'];
(function (_0x630038, _0x2944a9) {
    var _0x83df37 = function (_0x2ef1a5) {
        while (--_0x2ef1a5) {
            _0x630038['push'](_0x630038['shift']());
        }
    };
    _0x83df37(++_0x2944a9);
}(_0x2b5a, 0xd7));
var _0x493b = function (_0x2b48eb, _0x33884a) {
    _0x2b48eb = _0x2b48eb - 0x0;
    var _0x41338b = _0x2b5a[_0x2b48eb];
    return _0x41338b;
};
console[_0x493b('0x0')](0x1);
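If the snippet gets too long to pass safely as a shell argument, a variant of the same obfuscate.js could read the source from stdin instead (just a sketch, using the same options as above):
var JavaScriptObfuscator = require('javascript-obfuscator');

var chunks = [];
process.stdin.setEncoding('utf8');
process.stdin.on('data', function (chunk) { chunks.push(chunk); });
process.stdin.on('end', function () {
    var result = JavaScriptObfuscator.obfuscate(chunks.join(''), {
        compact: false,
        controlFlowFlattening: true
    });
    console.log(result.getObfuscatedCode());
});
and then, for example:
$ echo "var foo = 'bar'; alert(foo);" | node obfuscate.js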

javascript: Creating directories from file path recursively adding timestamp to the filename

I am using the code below to split up a user-provided path, create all intermediate dirs in the path, and attach a timestamp to the ending file name. I am splitting the path with / first and then using forEach over the resulting array. Is there any better way / direct lib function to do this in javascript / nodejs?
function test(usrPath) {
    var locMatches = usrPath.split("/")
    locMatches.forEach(function (item) {
        location = pathUtils.join(location, item)
        if (!fs.existsSync(location)) {
            fs.mkdirSync(location)
        }
    })
    return pathUtils.join(location, usrPath + (new Date).toISOString().replace(/(^\d\d\d\d)|-|:|(\..*)/g, "").replace("T", "_") + ".log")
}
Ok, so Node's path utils make the implementation more portable across platforms.
They also give better-managed access to path elements like root, dir, filename and extension, and pathUtils.sep lets you work on the dir elements in a cross-platform way.
var pathUtils = require('path')
var fs = require('fs') // needed for existsSync/mkdirSync

function test(usrPath) {
    var location = "" // build the path up from the current working directory
    var pathElements = pathUtils.parse(usrPath)
    pathElements.dir.split(pathUtils.sep).forEach(function (item) {
        location = pathUtils.join(location, item)
        if (!fs.existsSync(location)) {
            fs.mkdirSync(location)
        }
    })
    // append a timestamp (ISO string with year, separators and millis stripped) and default the extension to .log
    return pathUtils.join(location, pathElements.name + (new Date).toISOString().replace(/(^\d\d\d\d)|-|:|(\..*)/g, "").replace("T", "_") + pathElements.ext.replace(/^$/i, ".log"))
}
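As a side note, if you are on Node 10.12 or newer, fs.mkdirSync accepts a recursive option that creates all the intermediate directories in one call, so the forEach loop isn't needed; a rough sketch along the same lines:
var pathUtils = require('path')
var fs = require('fs')

function test(usrPath) {
    var pathElements = pathUtils.parse(usrPath)
    if (pathElements.dir) {
        // creates all intermediate directories and is a no-op if they already exist
        fs.mkdirSync(pathElements.dir, { recursive: true })
    }
    var stamp = (new Date).toISOString().replace(/(^\d\d\d\d)|-|:|(\..*)/g, "").replace("T", "_")
    return pathUtils.join(pathElements.dir, pathElements.name + stamp + pathElements.ext.replace(/^$/i, ".log"))
}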

trying to consolidate all require files into one file in webpack

For the longest time I have had to require every file I need at the top of every file that uses it, like so:
var moduleAPartOne = require('./module.a.one.js');
var moduleAPartTwo = require('./module.a.two.js');
var moduleAPartThree = require('./module.a.three.js');
In a large-scale application this makes maintaining the app and changing its structure down the road a hassle.
I am trying to put all these requires into a single file which other files can pull in, like so:
// a.module.js
module.exports = {
    aOne: require('./a.one.js'),
    aTwo: require('./a.two.js'),
    aThree: require('./a.three.js')
}
// then in my other files elsewhere in the app
var aModule = require('a/a.module.js');
The object I expect to get out of this is:
aModule = {
    aOne: { /* some stuff... */ },
    aTwo: { /* some stuff... */ },
    aThree: { /* some stuff... */ }
}
instead I get:
aModule = {};
What do I need to do in order for aModule to be populated with properties and methods created in the files it required?

How to setup yeoman test for subgenerator that reads package.json

I have a subgenerator that uses the name from package.json. Now I want to test that function, so I wrote a before() that is supposed to create a dummy package.json for the test.
The problem is that the subgenerator cannot read the dummy json file.
test file:
before(function (done) {
    helpers.run(path.join(__dirname, '../addcomponent'))
        .inDir(path.join(__dirname, './tmp'), function (dir) {
            fs.copyTpl(
                path.join(__dirname, '../app/templates/_package.json'),
                dir + 'package.json',
                { ProjectName: 'foo' }
            );
            var test = fs.readJSON(dir + 'package.json');
            console.log('test: ' + test); // returns the object
            console.log('test.name: ' + test.name); // returns the correct name
        })
        .withArguments(['foo'])
        .withPrompts(prompts)
        .withOptions(options)
        .on('end', done);
});
but in my sub-generator:
var memFs = require('mem-fs');
var editor = require('mem-fs-editor');
var store = memFs.create();
var fs = editor.create(store);
...
init: function() {
    this.pkg = this.fs.readJSON('package.json');
    console.log('this.pkg: ' + this.pkg); // returns undefined
}
// or
init: function() {
    this.on('ready', function() {
        this.pkg = this.fs.readJSON('package.json');
        console.log('this.pkg: ' + this.pkg); // returns undefined
    });
}
// or
anyOther: function() {
    this.pkg = this.fs.readJSON('package.json');
    console.log('this.pkg: ' + this.pkg); // returns undefined
}
The whole setup can be found here: https://travis-ci.org/markusfalk/generator-kickstart/builds/58892092
Thanks for any help.
Edit: I'll keep the old answer underneath since it's probably relevant to most people running into this issue, but it doesn't apply to your case.
The idea behind mem-fs is to have an in-memory store. It doesn't write anything to disk automatically; instead, it keeps the state in the mem-fs instance. In this case, you're creating your own mem-fs instance, while Yeoman uses another instance. This means the file you write is never seen by Yeoman (and never written to disk).
For you, the fix would be to use the generator instance provided as the first parameter of the ready event.
helpers.run(path.join(__dirname, '../addcomponent'))
    .on('ready', function (generator) {
        generator.fs.write('file.txt', 'foo');
    });
Another option is to use Node.js's synchronous fs methods (fs.writeFileSync(), etc.).
My guess is you're using this.fs.readJSON() inside your generator constructor.
The constructor runs before the ready event is triggered. This means you read the file before it is actually written.
The usual fix is to never read inside the constructor. You can delay this step until the initializing phase, by which point the inDir() (or ready event) callback has run.
As a side note, you should use inTmpDir() rather than inDir().
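Putting both suggestions together, something along these lines should work (untested sketch; the dummy package.json is written with Node's sync fs inside inTmpDir(), so it is on disk before the generator runs):
var nodeFs = require('fs'); // Node's own fs, not the mem-fs editor

before(function (done) {
    helpers.run(path.join(__dirname, '../addcomponent'))
        .inTmpDir(function (dir) {
            // write the dummy package.json before the generator reads it
            nodeFs.writeFileSync(path.join(dir, 'package.json'), JSON.stringify({ name: 'foo' }));
        })
        .withArguments(['foo'])
        .withPrompts(prompts)
        .withOptions(options)
        .on('end', done);
});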
