Why is process.env undefined in a module? - javascript

I'm a beginner with Node.js and I want environment variables shared across modules. I read those variables with the dotenv package, but in the next required module process.env is undefined.
app.js
console.log(require('dotenv').config())
console.log(process.env.NODE_ENV);
require('./task')
task.js
console.log(process.env);
console.log(process.env.NODE_ENV);
.env
NODE_ENV=development
PORT=8080
console log
{ parsed: { NODE_ENV: 'development', PORT: '8080' } }
development
undefined
E:\msf\nodejs_prj\compositor\task.js:2
console.log(process.env.NODE_ENV);
^
TypeError: Cannot read property 'NODE_ENV' of undefined
at Object.<anonymous> ...
I created a new, clean project with the code above and it works for me too. That means the problem is related to something else. Node.js is weird about errors.
This is the whole code from task.js:
const fs = require('fs')
const path = require('path')
const decompress = require('decompress')
const dir = './upload'
console.log(process, process.env)
function process() {
console.log('cron - process data');
fs.readdir(dir, (err, files) => {
if (err) return
files.forEach(file => {
if (path.extname(file) != '.zip') return
let target = path.join(dir, path.basename(file).replace(path.extname(file), ''))
unlinkDirSync(target)
decompress(path.join(dir, file), target).then(files => {
console.log(files);
console.log('done!');
//todo process unzipped files
//todo delete unzipped directory and zip file
})
})
})
}
function unlinkDirSync(dir_path) {
if (fs.existsSync(dir_path)) {
fs.readdirSync(dir_path).forEach(function (entry) {
var entry_path = path.join(dir_path, entry);
if (fs.lstatSync(entry_path).isDirectory()) {
unlinkDirSync(entry_path);
} else {
fs.unlinkSync(entry_path);
}
});
fs.rmdirSync(dir_path);
}
}
if (process.env === undefined || process.env.NODE_ENV === undefined || process.env.NODE_ENV === 'production') {
console.log('starting on production')
setInterval(process, 1000 * 60)
} else {
console.log('starting on development')
setTimeout(process, 1000)
}
If I comment out everything after the console.log, it works.

I'm an idiot. I named my function process, which is the name of the global process object :D
Sorry for bothering you guys, and thanks for the help.
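For anyone who hits the same thing: because function declarations are hoisted, a function named process shadows the global process object for the entire module, even on lines above the declaration. A minimal sketch (not the original task.js):

// The whole module sees the local function instead of the global process object.
console.log(typeof process) // "function" - the declaration below is hoisted
console.log(process.env)    // undefined - the local function has no env property

function process() {
  console.log('this local function shadows the global process')
}

// Renaming the function (e.g. to processFiles) restores access to the global process.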

Add require('dotenv').config() globally, i.e. in your entry file before anything else. Also make sure the .env file is in the root directory of the project. I have created a sample on GitHub: https://github.com/GMaker01/basic-dotenv-example
For reference, you can look at the dotenv documentation.
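For example, a minimal entry point (reusing the file names from the question above) could look like this, a sketch rather than a full app:

// app.js - load .env once, as early as possible, before requiring other modules
require('dotenv').config()

console.log(process.env.NODE_ENV) // 'development' if .env contains NODE_ENV=development
require('./task')                 // modules required afterwards see the same process.env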

Related

Discord.js v12 | Cannot find module '../undefined/help.js'

My bot has a command located in /commands/developer/reload.js and its purpose is to unload a command and then load it again. But when trying to find the commands folder it throws an error saying Cannot find module '../undefined/help.js', even though the path is ../misc/help.js.
Code:
const fs = require('fs');
module.exports = {
name: 'reload',
description: 'Reloads a command',
args: true,
usage: '<command_name>',
cooldown: 1,
aliases: ['rl', 'restart'],
execute(message, args) {
const commandName = args[0].toLowerCase();
const command = message.client.commands.get(commandName) || message.client.commands.find(cmd => cmd.aliases && cmd.aliases.includes(commandName));
if(!command) {
return message.channel.send(`There is no command called '${commandName}'`);
}
// commandFolders returns undefined
const commandFolders = fs.readdirSync('./commands');
// Also returns undefined
const folderName = commandFolders.find(folder => {
fs.readdirSync(`./commands/${folder}`).includes(`${command.name}.js`);
})
// Command errors out here
delete require.cache[require.resolve(`../${folderName}/${command.name}.js`)];
// This part never runs.
try {
const newCommand = require(`../${folderName}/${command.name}.js`);
message.client.commands.set(newCommand.name, newCommand);
message.channel.send(`Command '${newCommand.name}' was reload successfully`)
} catch (err) {
console.error(err);
message.channel.send(`There was an error while reloading a Command.`)
}
}
}
The reason folderName comes back as undefined is that you are not returning anything from the callback you pass to find. The callback does check whether a folder contains the command file, but because the result of that includes check is never returned, find receives undefined for every folder and therefore returns undefined itself. You just have to return that value; the folderName lookup might look something like this:
const folderName = commandFolders.find(folder => fs.readdirSync(`commands/${folder}`).includes(`${command.name}.js`)) // If you want a one-liner
// Or
const folderName = commandFolders.find(folder => {
return fs.readdirSync(`commands/${folder}`).includes(`${command.name}.js`)
})
If the error persists, it is most likely because the path is not correct. In that case, please provide the folder structure of your bot.
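If it does turn out to be a path problem, one possible sketch (assuming reload.js really lives in commands/developer/, and findCommandFolder is just a hypothetical helper name) is to resolve the commands folder relative to this file instead of the process working directory:

const fs = require('fs');
const path = require('path');

// Hypothetical helper: find which subfolder of commands/ contains `${name}.js`,
// resolving commands/ relative to this file (commands/developer/reload.js).
function findCommandFolder(name) {
  const commandsDir = path.join(__dirname, '..');
  return fs.readdirSync(commandsDir).find(folder =>
    fs.readdirSync(path.join(commandsDir, folder)).includes(`${name}.js`)
  );
}

// Usage inside execute(): const folderName = findCommandFolder(command.name);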

Error: ENOTDIR: not a directory, scandir error on interactionCreate

All of my files from Commands are read in fine, but I get an error from interactionCreate.js located in Events:
node:internal/fs/utils:343
throw err;
^
Error: ENOTDIR: not a directory, scandir './Events/interactionCreate.js'
My Event.js file is as follows:
const { readdirSync } = require('fs');
const ascii = require('ascii-table');
let table = new ascii("Events");
table.setHeading('EVENTS', ' LOAD STATUS');
module.exports = (client) => {
readdirSync('./Events/').forEach(dir => {
const events = readdirSync(`./Events/${dir}`).filter(file => file.endsWith('.js'));
for(let file of events) {
let pull = require(`../Events/${dir}/${file}`);
if(pull.name) {
client.events.set(pull.name, pull);
} else {
table.addRow(file, 'EVENT REGISTERED')
continue;
} if(pull.aliases && Array.isArray(pull.aliases)) pull.aliases.forEach(alias => client.aliases.set(alias, pull.name))
}
});
console.log(table.toString());
}
Your problem is here:
readdirSync('./Events/').forEach(dir => {
const events = readdirSync(`./Events/${dir}`)
readdirSync will return all the entries in the Events dir, which includes both files and directories. You've named your variable dir, but they aren't all directories. This is evidenced by the error message, which specifically states that ./Events/interactionCreate.js is not a directory.
Either remove the non-directories from your Events directory (i.e. move that file), or better, check whether dir is in fact a directory before calling readdirSync on it.
The easiest way to do that is to add the { withFileTypes: true } option, and then you can call dir.isDirectory(), as sketched after the link below.
See docs https://nodejs.org/api/fs.html#fsreaddirsyncpath-options
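A sketch of that fix, keeping the structure of the original loop (dir is now a Dirent object, so its file name lives in dir.name):

const { readdirSync } = require('fs');

// withFileTypes makes readdirSync return Dirent objects instead of strings,
// so plain files such as interactionCreate.js can be skipped before recursing.
readdirSync('./Events/', { withFileTypes: true }).forEach(dir => {
  if (!dir.isDirectory()) return; // ignore files sitting directly in ./Events/
  const events = readdirSync(`./Events/${dir.name}`).filter(file => file.endsWith('.js'));
  // ...require and register each event file as before
});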

Config params are undefined in error handler

I have a config file that just maps the process.env data to params.
It works fine and I can use it correctly everywhere, but in my unexpected-exception handler I can't use it... all the params from the config file are undefined.
my config file:
module.exports = {
env: process.env.NODE_ENV,
};
here is my uncaught exception catcher:
process.on('uncaughtException', (error) => {
errorManagement.handler.handleError(error);
if (!errorManagement.handler.isTrustedError(error)) process.exit(1);
});
and here is the error handler; env is undefined here, while everywhere else env is defined:
const {
env,
} = require('../../config');
const logger = require('../../libraries/logger');
const mailer = require('../../libraries/mailer');
function ErrorHandler() {
this.handleError = async (err) => {
console.log(env);
};
}
module.exports.handler = new ErrorHandler();
tree of my project folder:
EDIT:
I found the problem but I'm still not sure why it happened...
in my config.js file I did:
const errorManager = require('./components/errorManagement');
[
'DB_USER',
].forEach((name) => {
if (typeof process.env[name] === 'undefined') {
throw new errorManager.AppError('Environment var missing', 500, `Environment variable ${name} is missing`, true);
}
});
When I deleted the error manager and used the express Error, everything worked as expected.
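A guess at the why (hedged, since the full project tree isn't shown): config.js requires ./components/errorManagement, and the error-management code requires ../../config back, so there is a require cycle. During a cycle Node returns whatever the first module has exported so far, which at that point is still an empty object, so destructuring env out of it gives undefined in that one module. A tiny self-contained sketch of the effect, with hypothetical file names:

// a.js (plays the role of config.js)
const b = require('./b')                // starts the cycle
module.exports = { env: 'development' }

// b.js (plays the role of the error handler)
const { env } = require('./a')          // a.js hasn't finished yet, so we get its partial exports: {}
console.log(env)                        // undefined

// Running `node a.js` prints "undefined": b.js captured env before a.js assigned module.exports.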

How do you check if a file exists locally with an Electron app

I am trying to see if a file exists locally like this:
if (exec(`-f ~/.config/myApp/bookmarks.json`)) {
console.log('exists')
} else {
console.log('does not')
}
However, I get exists in the console whether the file exists or not
You should import the fs module into your code.
If you're running in the main process, then a simple const fs = require('fs'); will do, but if you're in the renderer process then use const fs = require('electron').remote.require('fs').
Then with the fs module you can run a simple existence check on the file:
if (fs.existsSync(`~/.config/myApp/bookmarks.json`)) {
console.log('exists')
} else {
console.log('does not')
}
Although you really should check for this asynchronously:
fs.access(`~/.config/myApp/bookmarks.json`, (err) => {
if (err) {
console.log('does not exist')
} else {
console.log('exists')
}
})
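One caveat with both snippets above: fs does not expand the shell's ~ shorthand, so ~/.config/... only matches a directory literally named ~. A safer sketch (path taken from the question) builds the absolute path with os.homedir():

const fs = require('fs')
const os = require('os')
const path = require('path')

// Build an absolute path; fs does not understand ~ the way a shell does.
const bookmarks = path.join(os.homedir(), '.config', 'myApp', 'bookmarks.json')

fs.access(bookmarks, (err) => {
  console.log(err ? 'does not exist' : 'exists')
})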

Looping through files in a folder in Node.js

I am trying to loop through and pick up the files in a directory, but I'm having some trouble implementing it. How do I pull in multiple files and then move them to another folder?
var dirname = 'C:/FolderwithFiles';
console.log("Going to get file info!");
fs.stat(dirname, function (err, stats) {
if (err) {
return console.error(err);
}
console.log(stats);
console.log("Got file info successfully!");
// Check file type
console.log("isFile ? " + stats.isFile());
console.log("isDirectory ? " + stats.isDirectory());
});
Older answer with callbacks
You want to use the fs.readdir function to get the directory contents and the fs.rename function to actually do the renaming. Both of these functions have synchronous versions if you need to wait for them to finish before running the code that comes afterwards.
I wrote a quick script that does what you described.
var fs = require('fs');
var path = require('path');
// In newer Node.js versions where process is already global this isn't necessary.
var process = require("process");
var moveFrom = "/home/mike/dev/node/sonar/moveme";
var moveTo = "/home/mike/dev/node/sonar/tome"
// Loop through all the files in the temp directory
fs.readdir(moveFrom, function (err, files) {
if (err) {
console.error("Could not list the directory.", err);
process.exit(1);
}
files.forEach(function (file, index) {
// Make one pass and make the file complete
var fromPath = path.join(moveFrom, file);
var toPath = path.join(moveTo, file);
fs.stat(fromPath, function (error, stat) {
if (error) {
console.error("Error stating file.", error);
return;
}
if (stat.isFile())
console.log("'%s' is a file.", fromPath);
else if (stat.isDirectory())
console.log("'%s' is a directory.", fromPath);
fs.rename(fromPath, toPath, function (error) {
if (error) {
console.error("File moving error.", error);
} else {
console.log("Moved file '%s' to '%s'.", fromPath, toPath);
}
});
});
});
});
Tested on my local machine.
node testme.js
'/home/mike/dev/node/sonar/moveme/hello' is a file.
'/home/mike/dev/node/sonar/moveme/test' is a directory.
'/home/mike/dev/node/sonar/moveme/test2' is a directory.
'/home/mike/dev/node/sonar/moveme/test23' is a directory.
'/home/mike/dev/node/sonar/moveme/test234' is a directory.
Moved file '/home/mike/dev/node/sonar/moveme/hello' to '/home/mike/dev/node/sonar/tome/hello'.
Moved file '/home/mike/dev/node/sonar/moveme/test' to '/home/mike/dev/node/sonar/tome/test'.
Moved file '/home/mike/dev/node/sonar/moveme/test2' to '/home/mike/dev/node/sonar/tome/test2'.
Moved file '/home/mike/dev/node/sonar/moveme/test23' to '/home/mike/dev/node/sonar/tome/test23'.
Moved file '/home/mike/dev/node/sonar/moveme/test234' to '/home/mike/dev/node/sonar/tome/test234'.
Update: fs.promises functions with async/await
Inspired by ma11hew28's answer (shown here), here is the same thing as above but with the async functions in fs.promises. As noted by ma11hew28, this may have memory limitations versus fs.promises.opendir added in v12.12.0.
Quick code below.
//jshint esversion:8
//jshint node:true
const fs = require( 'fs' );
const path = require( 'path' );
const moveFrom = "/tmp/movefrom";
const moveTo = "/tmp/moveto";
// Make an async function that gets executed immediately
(async ()=>{
// Our starting point
try {
// Get the files as an array
const files = await fs.promises.readdir( moveFrom );
// Loop them all with the new for...of
for( const file of files ) {
// Get the full paths
const fromPath = path.join( moveFrom, file );
const toPath = path.join( moveTo, file );
// Stat the file to see if we have a file or dir
const stat = await fs.promises.stat( fromPath );
if( stat.isFile() )
console.log( "'%s' is a file.", fromPath );
else if( stat.isDirectory() )
console.log( "'%s' is a directory.", fromPath );
// Now move async
await fs.promises.rename( fromPath, toPath );
// Log because we're crazy
console.log( "Moved '%s'->'%s'", fromPath, toPath );
} // End for...of
}
catch( e ) {
// Catch anything bad that happens
console.error( "We've thrown! Whoops!", e );
}
})(); // Wrap in parenthesis and call now
fs.readdir(path[, options], callback) (which Mikey A. Leonetti used in his answer) and its variants (fsPromises.readdir(path[, options]) and fs.readdirSync(path[, options])) each reads all of a directory's entries into memory at once. That's good for most cases, but if the directory has very many entries and/or you want to lower your application's memory footprint, you could instead iterate over the directory's entries one at a time.
Asynchronously
Directories are async iterable, so you could do something like this:
const fs = require('fs')
async function ls(path) {
const dir = await fs.promises.opendir(path)
for await (const dirent of dir) {
console.log(dirent.name)
}
}
ls('.').catch(console.error)
Or, you could use dir.read() and/or dir.read(callback) directly.
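For example, a minimal sketch of the promise form of dir.read(), which resolves with the next Dirent, or with null once the directory is exhausted:

const fs = require('fs')

async function ls(path) {
  const dir = await fs.promises.opendir(path)
  let dirent
  // dir.read() resolves with the next Dirent, or null when there are no more entries.
  while ((dirent = await dir.read()) !== null) {
    console.log(dirent.name)
  }
  await dir.close() // manual reads don't auto-close the directory handle
}

ls('.').catch(console.error)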
Synchronously
Directories aren't sync iterable, but you could use dir.readSync() directly. For example:
const fs = require('fs')
const dir = fs.opendirSync('.')
let dirent
while ((dirent = dir.readSync()) !== null) {
console.log(dirent.name)
}
dir.closeSync()
Or, you could make directories sync iterable. For example:
const fs = require('fs')
function makeDirectoriesSyncIterable() {
const p = fs.Dir.prototype
if (p.hasOwnProperty(Symbol.iterator)) { return }
const entriesSync = function* () {
try {
let dirent
while ((dirent = this.readSync()) !== null) { yield dirent }
} finally { this.closeSync() }
}
if (!p.hasOwnProperty('entriesSync')) { p.entriesSync = entriesSync }
Object.defineProperty(p, Symbol.iterator, {
configurable: true,
enumerable: false,
value: entriesSync,
writable: true
})
}
makeDirectoriesSyncIterable()
And then, you could do something like this:
const dir = fs.opendirSync('.')
for (const dirent of dir) {
console.log(dirent.name)
}
Note: "In busy processes, use the asynchronous versions of these calls. The synchronous versions will block the entire process until they complete, halting all connections."
References:
Node.js Documentation: File System: Class fs.Dir
Node.js source code: fs.Dir
GitHub: nodejs/node: Issues: streaming / iterative fs.readdir #583
Read all folders in a directory
const fs = require('fs');
const readAllFolder = (dirMain) => {
const readDirMain = fs.readdirSync(dirMain);
console.log(dirMain);
console.log(readDirMain);
readDirMain.forEach((dirNext) => {
console.log(dirNext, fs.lstatSync(dirMain + "/" + dirNext).isDirectory());
if (fs.lstatSync(dirMain + "/" + dirNext).isDirectory()) {
readAllFolder(dirMain + "/" + dirNext);
}
});
};
The answers provided above are for a single folder. Here is an asynchronous implementation for multiple folders, where all the folders are processed simultaneously but the smaller folders or files complete first.
Please comment if you have any feedback.
Asynchronously Multiple Folders
const fs = require('fs')
const util = require('util')
const path = require('path')
// Multiple folders list
const in_dir_list = [
'Folder 1 Large',
'Folder 2 Small', // small folder and files will complete first
'Folder 3 Extra Large'
]
// BEST PRACTICES: (1) The fast folder-list for loop has to stay outside the async capture-callback functions for the async approach to make sense.
// (2) The slower read/write or I/O work is best contained inside async capture-callback functions, because it is slower than the loop itself and the items that finish first get called back first.
for (let i = 0; i < in_dir_list.length; i++) {
const in_dir = in_dir_list[i]
// function is created (see below) so each folder is processed asynchronously for readFile_async that follows
readdir_async_capture(in_dir, function(files_path) {
console.log("Processing folders asynchronously ...")
for (let j = 0; j < files_path.length; j++) {
const file_path = files_path[j]
const file = file_path.substr(file_path.lastIndexOf("/") + 1, file_path.length)
// function is created (see below) so all files are read simultaneously but the smallest file will be completed first and get callback-ed first
readFile_async_capture(file_path, file, function(file_string) {
try {
console.log(file_path)
console.log(file_string)
} catch (error) {
console.log(error)
console.log("System exiting first to catch error if not async will continue...")
process.exit()
}
})
}
})
}
// fs.readdir async_capture function to deal with asynchronous code above
function readdir_async_capture(in_dir, callback) {
fs.readdir(in_dir, function(error, files) {
if (error) { return console.log(error) }
const files_path = files.map(function(x) { return path.join(in_dir, x) })
callback(files_path)
})
}
// fs.readFile async_capture function to deal with asynchronous code above
function readFile_async_capture(file_path, file, callback) {
fs.readFile(file_path, function(error, data) {
if (error) { return console.log(error) }
const file_string = data.toString()
callback(file_string)
})
}
