I'm planning to separate my functions into separate files and then import them into a single index.js, which then becomes the main exporter. I'm wondering whether having something like var bcrypt = require('bcrypt') in several of my files would be slower than just having it in one file.
Here's how I'm planning to group and export in index.js
const fs = require('fs');
const path = require('path')
const modules = {}
const files = fs.readdirSync(__dirname)
files.forEach(file => {
if (file === 'index.js') return
let temp = require(path.join(__dirname, file))
for (let key in temp) {
modules[key] = temp[key]
}
});
module.exports = modules
As an example of what I mean:
file1.js
var bcrypt = require("bcrypt");
module.exports.file1test = "hi"
file2.js
var bcrypt = require("bcrypt");
module.exports.file2test = "bye"
No, it does not. Whenever a module is required for the first time, the module's code runs, assigns something to its exports, and those exports are returned. Further requires of that module simply reference those exports again. The logic is similar to this:
const importModule = (() => {
const exports = {};
return (name) => {
if (!exports[name]) exports[name] = runModule(name);
return exports[name];
};
})();
So, multiple imports of the same module are no more expensive than referencing the same object multiple times.
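You can check this yourself: Node keeps loaded modules in require.cache, so two require calls for the same resolved path return the exact same exports object. A minimal sketch, assuming a local ./some-module.js exists (the name is just a placeholder):
const first = require('./some-module');  // runs the module the first time
const second = require('./some-module'); // served from require.cache, no re-execution
console.log(first === second);           // true: both point at the same exports object
console.log(Object.keys(require.cache)); // resolved paths of everything cached so far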
Related
I have many different modules in a project. Each module has its own folder, and within each module there are 3 files:
index.js
index.test.js
data.js
Every module has these files: the same file names, but different content.
For the index.test.js files, the import statements are all the same across all modules, and look like this:
const runAnalysis = require('./index');
const { data } = require('./data');
const { formatData } = require('utils');
const formattedData = formatData(data);
Since these import statements are the same for all index.test.js files, is there a way to consolidate these into one line, so it doesn't have to be repeated over and over in every module?
Is there a way to put these import statements in a header file, and then each index.test.js can just import the header file?
Not sure if this is efficient or recommended, but it works.
Create a utility JS file that lives at the project root directory; let's name it import-util.js, with the following code:
const { formatData } = require('utils');
const getMyImports = (dirname) => {
const runAnalysis = require(`${dirname}/index`);
const { data } = require(`${dirname}/data`);
const formattedData = formatData(data);
return {
runAnalysis,
formattedData
}
};
exports.getMyImports = getMyImports;
Then this file can be used like this to get runAnalysis and formattedData:
const { runAnalysis, formattedData } = require('../import-util').getMyImports(__dirname);
__dirname is the trick that makes it work: it provides the directory path from which index.js and data.js are imported.
This single line can now be used in every module folder. As long as the folder contains index.js and data.js, it works the same as the following four lines of code:
const runAnalysis = require('./index');
const { data } = require('./data');
const { formatData } = require('utils');
const formattedData = formatData(data);
I need to find a way to get the newest folder created in a directory. Usually there are 3-5 different folders inside a folder, and I want to find the newest one, either by folder name (newest version numbering) or by date and time created. Generally the folder name would look like this:
version-0101xxxxxxxxxxxx
(x representing the newest version)
You can get directories with 'fs' using 'readdirSync', then look inside each directory recursively.
This code snippet is extracted from (this answered question):
const fs = require('fs');
const path = require('path');
function flatten(lists) {
return lists.reduce((a, b) => a.concat(b), []);
}
function getDirectories(srcpath) {
return fs.readdirSync(srcpath)
.map(file => path.join(srcpath, file))
.filter(path => fs.statSync(path).isDirectory());
}
function getDirectoriesRecursive(srcpath) {
return [srcpath, ...flatten(getDirectories(srcpath).map(getDirectoriesRecursive))];
}
let dirs = getDirectoriesRecursive(__dirname);
console.info(dirs);
This will get you an array with all directories, then you just need to loop your array with the condition you need.
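For example, since the folder names in the question embed a version (version-0101…), one option is to sort the basenames and take the last one. A minimal sketch, assuming the version part is zero-padded so a plain string sort orders it correctly:
const newestByName = dirs
  .map(dir => path.basename(dir))              // keep only the folder name itself
  .filter(name => name.startsWith('version-')) // ignore anything that isn't a version folder
  .sort()                                      // lexicographic sort works for zero-padded versions
  .pop();                                      // last entry is the highest version
console.info(newestByName);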
You can use Node.js's built-in fs module for this. First of all, you need to list all the files and folders in the directory that you intend to search.
let dirs = await fs.readdir(dirName, {
withFileTypes: true,
});
You can use fs.promises.readdir for this. Make sure to add the option "withFileTypes" because it gives additional information that you can use to distinguish folders from files.
You can then filter all files like this:
dirs = dirs.filter((file) => file.isDirectory()).map((dirent) => dirent.name);
This gives you a result like ["firstFolder", "secondFolder", etc]
And then you can loop through the above array and find the newest folder by using the fs.promises.stat method
let newestFolder = await fs.stat(path.join(dirName, dirs[0]));
let newestFolderName = dirs[0];
for (let directory of dirs) {
const stats = await fs.stat(path.join(dirName, directory));
if (stats.ctimeMs > newestFolder.ctimeMs) {
newestFolder = stats;
newestFolderName = directory;
}
}
And then you can create the absolute path of that directory using path module:
const absolutePath = path.resolve(path.join(dirName, newestFolderName));
Here is how the function looks using promises:
const fs = require("fs/promises");
const path = require("path");
async function getNewestDirectory(dirName) {
let dirs = await fs.readdir(dirName, {
withFileTypes: true,
});
dirs = dirs.filter((file) => file.isDirectory()).map((dirent) => dirent.name);
let newestFolder = await fs.stat(path.join(dirName, dirs[0]));
let newestFolderName = dirs[0];
for (let directory of dirs) {
const stats = await fs.stat(path.join(dirName, directory));
if (stats.ctimeMs > newestFolder.ctimeMs) {
newestFolder = stats;
newestFolderName = directory;
}
}
console.log("newestFolder", newestFolder);
console.log("newestFolderName", newestFolderName);
return path.resolve(path.join(dirName, newestFolderName));
}
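It could then be called like this (the folder path below is just a placeholder):
getNewestDirectory('./some-parent-folder')
  .then((newestPath) => console.log('newest directory:', newestPath))
  .catch(console.error);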
And using a synchronous approach (not recommended):
const fs = require("fs");
let dirs = fs.readdirSync(".", {
withFileTypes: true,
});
dirs = dirs.filter((file) => file.isDirectory()).map((dirent) => dirent.name);
let newestFolder = fs.statSync(dirs[0]);
let newestFolderName = dirs[0];
for (let directory of dirs) {
const stats = fs.statSync(directory);
if (stats.ctimeMs > newestFolder.ctimeMs) {
newestFolder = stats;
newestFolderName = directory;
}
console.log("newestFolder", newestFolder);
console.log("newestFolderName", newestFolderName);
}
I have an index.js script that contains Sequelize models.
Here is the tree structure of my folder:
models/
files/
xml_files/
csv_files/
txt_files/
index.js
server.js
This is my index.js code:
const generate_files = require('./files')
const Files = generate_files(sequelize, DataTypes)
const generate_xml_files = require('./xml_files')
const Xml_File = generate_xml_files(sequelize, DataTypes)
const generate_csv_files = require('./csv_files')
const Csv_File = generate_csv_files(sequelize, DataTypes)
const generate_txt_files = require('./txt_files')
const Txt_File = generate_txt_files(sequelize, DataTypes)
module.exports = {
Files, Xml_File, Csv_File, Txt_File
}
In server.js I imported each model like this:
const {Files, Xml_File, Csv_File, Txt_File} = require('./models')
Now in server.js I want to get the name of the Sequelize model and check whether it matches the name of the table, like this:
const {Files, Xml_File, Csv_File, Txt_File} = require('./models')
if (Files.name == TableName){
Files.findAll()
}
if (Xml_File.name == TableName){
Xml_File.findAll()
}
if (Csv_File.name == TableName){
Csv_File.findAll()
}
....
How can I avoid doing these checks every time?
You can do:
const Example = require('./index')
Example.Files
As for making a loop, I'm not sure you can do that, but you can do:
exports.File = generate_files(sequelize, DataTypes)
Which has the same effect as:
module.exports = {File}
You can create an object and loop through the entries, like so.
var exports = {
'File': './files',
'Xml_File': './xml_files',
'Csv_File': './csv_files',
'Txt_File': './txt_files'
};
var e = {};
Object.entries(exports).forEach(ex=>{
e[ex[0]] = require(ex[1])(sequelize, DataTypes);
});
module.exports = e;
Then you'll want to import them from this index file...
const {Files, Xml_File, Csv_File, Txt_File} = require('./models/index.js')
Per your update:
const Models = require('./models/index.js');
Object.entries(Models).forEach(([name, model]) => {
if (name == TableName) {
model.findAll();
}
});
To simplify your index.js you could also do it like this:
module.exports = {
Files: require('./files')(sequelize, DataTypes),
Xml_File: require('./xml_files')(sequelize, DataTypes),
Csv_File: require('./csv_files')(sequelize, DataTypes),
Txt_File: require('./txt_files')(sequelize, DataTypes)
}
It's easier to read, at least for me.
Another way is to loop over the directory, so you never need to change the code in index.js even if you add another folder like /xxx_files:
const Fs = require('fs');
var dir = Fs.readdirSync(__dirname, { withFileTypes: true });
var exports = {};
dir.forEach(d => {
if (d.isDirectory())
exports[d.name] = require('./' + d.name)(sequelize, DataTypes);
});
// !!!!!!! the names of the exports will be the names of the directories
module.exports = exports;
And to avoid the checks if (Files.name == TableName)
all you need is to not import like this:
const {Files, Xml_File, Csv_File, Txt_File} = require('./models')
but rather like this:
const models = require('./models')
// models = { files, xml_files, csv_files, txt_files }
then you can do this:
var db = models[TableName] // assuming TableName is files, xml_files etc.
if (db)
doSomething();
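Putting it together, a minimal sketch (doSomething stands in for whatever query you need, e.g. findAll):
const models = require('./models');
const db = models[TableName];        // TableName assumed to be 'files', 'xml_files', etc.
if (db) {
  db.findAll().then(rows => console.log(rows));
} else {
  console.warn('No model found for table:', TableName);
}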
I have a files.js file and am requiring it at the start of my code.
When I use:
const files = require('./lib/files');
I get this error:
Error: Cannot find module './lib/files'
However if I test it with another one of the files in the same folder like:
const files = require('./lib/repo');
It runs.
files.js:
const fs = require('fs');
const path = require('path');
module.exports = {
getCurrentDirectoryBase: () => {
return path.basename(process.cwd());
},
directoryExists: (filePath) => {
return fs.existsSync(filePath);
}
};
I would use the tree command, but I have too many node modules installed to show it correctly, so:
const { getCurrentDirectoryBase, directoryExists } = require('./lib/files')
How can I initialize a static variable in a module.exports = class in Node.js?
Basically, what I'm trying to achieve is: if StaticVariable is null, I'll read data from a JSON file and then store it in StaticVariable.
module.exports = class Config {
static fetch() {
if ( StaticVariable === null ) {
const fs = require('fs');
const data = fs.readFileSync('./config.json');
const config = JSON.parse(data);
StaticVariable = config;
}
return StaticVariable;
}
}
The fetch() function will be called several times, so it is unnecessary to call readFileSync on every call.
A static-only class is an antipattern in JavaScript because the class is never instantiated.
If there's a need for a method that lazily loads a JSON file, a plain object can be used. There's already such an object in module scope, module.exports:
const fs = require('fs');
let StaticVariable;
exports.fetch = () => {
if ( StaticVariable == undefined ) { // not "=== null"
const data = fs.readFileSync('./config.json');
const config = JSON.parse(data);
StaticVariable = config;
}
return StaticVariable;
}
There may be no need to parse the file manually, because this can be handled by a require('./config.json') one-liner, which also resolves relative paths more consistently.
If the JSON file can be eagerly loaded, this can be simplified to:
exports.config = require('./config.json');
If there's a need for a Config class that accesses the configuration object, it can refer to it, e.g.:
exports.Config = class Config {
constructor() {
this.config = deepClone(exports.config); // deepClone: any deep-copy helper, e.g. structuredClone
}
modify() {
// modify this.config
}
};
I can think of several ways to achieve what you are asking.
Saving it in a global variable
//initialise it here
var StaticVariable = null;
//however if you initialise it here, it makes more sense to just load it once
const fs = require('fs');
const data = fs.readFileSync('./config.json');
const config = JSON.parse(data);
StaticVariable = config;
module.exports = class Config {
static fetch() {
return StaticVariable;
}
}
Or just use require. require will do the same thing you want to do: it will read config.json, parse it as valid JSON, and do this only once.
module.exports = class Config {
static fetch() {
return require('./config.json');
}
}
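For illustration, assuming the class above lives in a file named config.js (a name chosen here, not given in the question), it could be used like this:
const Config = require('./config'); // hypothetical path to the class above
const settings = Config.fetch();    // config.json is read and parsed only once; require caches it
console.log(settings);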
Starting from Node 15.2.1 (ES2020), static private class fields are supported, so a static class may no longer be an antipattern. Ref: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes/static
module.exports = class Config {
static #StaticVariable = null;
static fetch() {
if ( Config.#StaticVariable === null ) {
const fs = require('fs');
const data = fs.readFileSync('./config.json');
const config = JSON.parse(data);
Config.#StaticVariable = config;
}
return Config.#StaticVariable;
}
}
The # sign marks the field as private; more reference can be found at https://node.green. But the easiest way is still the one described in the other answers:
exports.config = require('./config.json');