Can I configure console.log so that the logs are written on a file instead of being printed in the console?
You could also just override the default console.log function:
var fs = require('fs');
var util = require('util');
var log_file = fs.createWriteStream(__dirname + '/debug.log', {flags : 'w'});
var log_stdout = process.stdout;
console.log = function(d) {
log_file.write(util.format(d) + '\n');
log_stdout.write(util.format(d) + '\n');
};
The above example will log to debug.log and to stdout.
Edit: See the multi-parameter version by Clément also on this page.
Update 2013 - This was written around Node v0.2 and v0.4; there are much better utilities now around logging. I highly recommend Winston.
Update Late 2013 - We still use winston, but now with a logger library to wrap the functionality around logging custom objects and formatting. Here is a sample of our logger.js: https://gist.github.com/rtgibbons/7354879
Should be as simple as this.
var fs = require('fs');
// dir is the log directory; proc is assumed to be a child process (e.g. from child_process.spawn)
var access = fs.createWriteStream(dir + '/node.access.log', { flags: 'a' })
, error = fs.createWriteStream(dir + '/node.error.log', { flags: 'a' });
// redirect the child's stdout / stderr
proc.stdout.pipe(access);
proc.stderr.pipe(error);
If you are looking for something in production winston is probably the best choice.
If you just want to do dev stuff quickly, output directly to a file (I think this works only for *nix systems):
nohup node simple-server.js > output.log &
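That only captures stdout; if you also want console.error output in the same file, add the usual shell redirection of stderr (standard shell syntax, nothing Node-specific):
nohup node simple-server.js > output.log 2>&1 &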
I often use many arguments to console.log() and console.error(), so my solution would be:
var fs = require('fs');
var util = require('util');
var logFile = fs.createWriteStream('log.txt', { flags: 'a' });
// Or 'w' to truncate the file every time the process starts.
var logStdout = process.stdout;
console.log = function () {
logFile.write(util.format.apply(null, arguments) + '\n');
logStdout.write(util.format.apply(null, arguments) + '\n');
}
console.error = console.log;
Winston is a very popular npm module used for logging.
Here is a how-to.
Install winston in your project as:
npm install winston --save
Here's a ready-to-use configuration that I frequently use in my projects as logger.js under utils.
/**
* Configurations of logger.
*/
const winston = require('winston');
const winstonRotator = require('winston-daily-rotate-file');
const consoleConfig = [
new winston.transports.Console({
'colorize': true
})
];
const createLogger = new winston.Logger({
'transports': consoleConfig
});
const successLogger = createLogger;
successLogger.add(winstonRotator, {
'name': 'access-file',
'level': 'info',
'filename': './logs/access.log',
'json': false,
'datePattern': 'yyyy-MM-dd-',
'prepend': true
});
const errorLogger = createLogger;
errorLogger.add(winstonRotator, {
'name': 'error-file',
'level': 'error',
'filename': './logs/error.log',
'json': false,
'datePattern': 'yyyy-MM-dd-',
'prepend': true
});
module.exports = {
'successlog': successLogger,
'errorlog': errorLogger
};
And then simply import it wherever required, like this:
const errorLog = require('../util/logger').errorlog;
const successlog = require('../util/logger').successlog;
Then you can log successes as:
successlog.info(`Success Message and variables: ${variable}`);
and errors as:
errorlog.error(`Error Message : ${error}`);
It also logs all the success logs and error logs to files under the logs directory, organised by date.
const fs = require("fs");
const {keys} = Object;
const {Console} = console;
/**
* Redirect console to a file. Call without path or with false-y
* value to restore original behavior.
* @param {string} [path]
*/
function file(path) {
const con = path ? new Console(fs.createWriteStream(path)) : null;
keys(Console.prototype).forEach(key => {
if (path) {
this[key] = (...args) => con[key](...args);
} else {
delete this[key];
}
});
};
// patch global console object and export
module.exports = console.file = file;
To use it, do something like:
require("./console-file");
console.file("/path/to.log");
console.log("write to file!");
console.error("also write to file!");
console.file(); // go back to writing to stdout
For simple cases, we could redirect the Standard Out (STDOUT) and Standard Error (STDERR) streams directly to a file (say, test.log) using '>' and '2>&1'.
Example:
// test.js
(function() {
// Below outputs are sent to Standard Out (STDOUT) stream
console.log("Hello Log");
console.info("Hello Info");
// Below outputs are sent to Standard Error (STDERR) stream
console.error("Hello Error");
console.warn("Hello Warning");
})();
node test.js > test.log 2>&1
As per the POSIX standard, 'input', 'output' and 'error' streams are identified by the positive integer file descriptors (0, 1, 2). i.e., stdin is 0, stdout is 1, and stderr is 2.
Step 1: '2>&1' will redirect from 2 (stderr) to 1 (stdout)
Step 2: '>' will redirect from 1 (stdout) to file (test.log)
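If you would rather keep the two streams in separate files, the same mechanism works with two redirections:
node test.js > stdout.log 2> stderr.log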
If this is for an application, you're probably better off using a logging module. It'll give you more flexibility. Some suggestions.
winston https://github.com/winstonjs/winston
log4js https://github.com/nomiddlename/log4js-node
Straight from Node.js's API docs on Console:
const fs = require('fs');
const { Console } = require('console'); // also available as console.Console
const output = fs.createWriteStream('./stdout.log');
const errorOutput = fs.createWriteStream('./stderr.log');
// custom simple logger
const logger = new Console(output, errorOutput);
// use it like console
const count = 5;
logger.log('count: %d', count);
// in stdout.log: count 5
Another solution not mentioned yet is by hooking the Writable streams in process.stdout and process.stderr. This way you don't need to override all the console functions that output to stdout and stderr. This implementation redirects both stdout and stderr to a log file:
var log_file = require('fs').createWriteStream(__dirname + '/log.txt', {flags : 'w'})
function hook_stream(stream, callback) {
var old_write = stream.write
stream.write = (function(write) {
return function(string, encoding, fd) {
write.apply(stream, arguments) // comment this line if you don't want output in the console
callback(string, encoding, fd)
}
})(stream.write)
return function() {
stream.write = old_write
}
}
console.log('a')
console.error('b')
var unhook_stdout = hook_stream(process.stdout, function(string, encoding, fd) {
log_file.write(string, encoding)
})
var unhook_stderr = hook_stream(process.stderr, function(string, encoding, fd) {
log_file.write(string, encoding)
})
console.log('c')
console.error('d')
unhook_stdout()
unhook_stderr()
console.log('e')
console.error('f')
It should print in the console
a
b
c
d
e
f
and in the log file:
c
d
For more info, check this gist.
Overwriting console.log is the way to go. But for it to work in required modules, you also need to export it.
module.exports = console;
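As a rough sketch of that idea (the file name logger.js and the log path are arbitrary choices here, not from the original answer):
// logger.js - override console.log and export the patched console
var fs = require('fs');
var util = require('util');
var logStream = fs.createWriteStream(__dirname + '/app.log', { flags: 'a' });

console.log = function () {
  logStream.write(util.format.apply(null, arguments) + '\n');
};

module.exports = console;
In any other module you can then do var console = require('./logger'); and its console.log calls end up in app.log as well.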
To save yourself the trouble of writing log files, rotating them and so on, you might consider using a simple logger module like winston:
// Include the logger module
var winston = require('winston');
// Set up log file. (you can also define size, rotation etc.)
winston.add(winston.transports.File, { filename: 'somefile.log' });
// Overwrite some of the built-in console functions
console.error = winston.error;
console.log = winston.info;
console.info = winston.info;
console.debug = winston.debug;
console.warn = winston.warn;
module.exports = console;
If you're using linux, you can also use output redirection. Not sure about Windows.
node server.js >> file.log 2>> file.log
>> file.log to redirect stdout to the file
2>> file.log to redirect stderr to the file
others use the shorthand &>> for both stdout and stderr, but it's not accepted by either my Mac or Ubuntu :(
extra: > overwrites, while >> appends.
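A portable alternative to &>> that both systems should accept is to append stdout and then duplicate stderr onto it:
node server.js >> file.log 2>&1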
By the way, regarding NodeJS loggers, I use pino + pino-pretty logger
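For reference, a minimal pino setup writing to a file might look roughly like this (the file path and messages are placeholders; check the pino docs for the exact API of the version you use):
const pino = require('pino');

// write newline-delimited JSON logs to ./app.log
const logger = pino(pino.destination('./app.log'));

logger.info('server started');
logger.error('something failed');
During development the output can be piped through pino-pretty (node app.js | pino-pretty) to make it human readable.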
METHOD STDOUT AND STDERR
This approach can help you (I use something similar in my projects) and works for all methods, including console.log, console.warn, console.error and console.info.
This method writes the bytes that go to stdout and stderr to a file. It is better than replacing the console.log, console.warn, console.error and console.info methods, because the file output will be exactly the same as what those methods print.
var fs= require("fs")
var os= require("os")
var HOME= os.homedir()
var stdout_r = fs.createWriteStream(HOME + '/node.stdout.log', { flags: 'a' })
var stderr_r = fs.createWriteStream(HOME + '/node.stderr.log', { flags: 'a' })
var attachToLog= function(std, std_new){
var originalwrite= std.write
std.write= function(data,enc){
try{
var d= data
if(!Buffer.isBuffer(d))
d= Buffer.from(data, (typeof enc === 'string') ? enc : "utf8")
std_new.write(d)
}catch(e){}
return originalwrite.apply(std, arguments)
}
}
attachToLog(process.stdout, stdout_r)
attachToLog(process.stderr, stderr_r)
// recommended catch error on stdout_r and stderr_r
// stdout_r.on("error", yourfunction)
// stderr_r.on("error", yourfunction)
Adding to the answer above, here is a little bit of an expansion of the short and efficient code overriding console.log. Minor additions: set the filename with the date, a wrapper function, and also do the original console.logging to keep the console active with the info.
Usage: in the beginning of your code, run setConsoleLogToFile([FILENAME]).
const fs = require("fs"),
util = require('util');
const getPrettyDate = ()=> new Date().toString().replace(":","-").replace(/00\s\(.*\)/, "").replace(` ${new Date().getFullYear()}`, ",").replace(/:\d\d\s/, " ");
module.exports.getPrettyDate = getPrettyDate;
module.exports.setConsoleLogToFile = (filename) => {
const log_file = fs.createWriteStream(`${__dirname}/${filename} - ${getPrettyDate()}.log`, { flags: 'w' });
const origConsole = console.log;
console.log = (d) => {
origConsole(d); // keep the original console output
log_file.write(util.format(d) + '\n'); // and also write to the file
};
}
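A hypothetical usage example, assuming the snippet above is saved as consoleToFile.js next to your entry point:
const { setConsoleLogToFile } = require('./consoleToFile');

setConsoleLogToFile('debug'); // creates something like "debug - <pretty date>.log" in that directory
console.log('this line goes to both the console and the log file');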
Most loggers are overkill and do not support the built-in console.log correctly. Hence I created console-log-to-file:
import { consoleLogToFile } from "console-log-to-file";
// or `const { consoleLogToFile } = require("console-log-to-file/dist/index.cjs.js")`
consoleLogToFile({
logFilePath: "/log/default.log",
});
// all of your console.log/warn/error/info will work as it does and save to file now.
If you are looking for a solution without modifying any code, here is a simple solution.
It requires pm2; just add it to your node modules and start your app with
pm2 start server.js
And you are done; console.logs are now automatically registered under ~/.pm2/logs/server-out.log.
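pm2 also has commands for working with those files, for example:
pm2 logs server   # tail the logs of the "server" process
pm2 flush         # empty all pm2 log files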
An improvement on Andres Riofrio's answer, to handle any number of arguments:
var fs = require('fs');
var util = require('util');
var log_file = fs.createWriteStream(__dirname + '/debug.log', {flags : 'w'});
var log_stdout = process.stdout;
console.log = function(...args) {
var output = args.join(' ');
log_file.write(util.format(output) + '\r\n');
log_stdout.write(util.format(output) + '\r\n');
};
You can now use Caterpillar which is a streams based logging system, allowing you to log to it, then pipe the output off to different transforms and locations.
Outputting to a file is as easy as:
var logger = new (require('caterpillar').Logger)(); // the original example uses require('./'), presumably because it lives inside the caterpillar repo
logger.pipe(require('fs').createWriteStream('./debug.log'));
logger.log('your log message');
Complete example on the Caterpillar Website
For future users: @keshavDulal's answer doesn't work for the latest version, and I couldn't find a proper fix for the issues reported against the latest version 3.3.3.
Anyway I finally fixed it after researching a bit. Here is the solution for winston version 3.3.3
Install winston and winston-daily-rotate-file
npm install winston
npm install winston-daily-rotate-file
Create a new file utils/logger.js
const winston = require('winston');
const winstonRotator = require('winston-daily-rotate-file');
var logger = winston.createLogger({
transports: [
new (winston.transports.DailyRotateFile)({
name: 'access-file',
level: 'info',
filename: './logs/access.log',
json: false,
datePattern: 'yyyy-MM-DD',
prepend: true,
maxFiles: 10
}),
new (winston.transports.DailyRotateFile)({
name: 'error-file',
level: 'error',
filename: './logs/error.log',
json: false,
datePattern: 'yyyy-MM-DD',
prepend: true,
maxFiles: 10
})
]
});
module.exports = {
logger
};
Then, in any file where you want to use logging, import the module like:
const logger = require('./utils/logger').logger;
Use logger like the following:
logger.info('Info service started');
logger.error('Service crashed');
If you are using forever to keep your node app running, then typing forever list will show you the path to the log file that console.log is writing to.
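If I remember correctly, forever also lets you pick those files yourself when starting the app, along these lines:
forever start -o out.log -e err.log server.js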
You can use the nodejs Console constructor
const fs = require('fs');
const mylog = new console.Console(
fs.createWriteStream("log/logger.log"),
fs.createWriteStream("log/error.log")
);
And then you can use it just like the normal console class, eg:
mylog.log("Ok!"); // Will be written into 'log/logger.log'
mylog.error("Bad!"); // Will be written into 'log/error.log'
You can also have a look at the noogger npm module:
https://www.npmjs.com/package/noogger
Simple and straightforward...
I took on the idea of swapping the output stream with my own stream.
const LogLater = require ('./loglater.js');
var logfile=new LogLater( 'log'+( new Date().toISOString().replace(/[^a-zA-Z0-9]/g,'-') )+'.txt' );
var PassThrough = require('stream').PassThrough;
var myout= new PassThrough();
var wasout=console._stdout;
myout.on('data',(data)=>{logfile.dateline("\r\n"+data);wasout.write(data);});
console._stdout=myout;
var myerr= new PassThrough();
var waserr=console._stderr;
myerr.on('data',(data)=>{logfile.dateline("\r\n"+data);waserr.write(data);});
console._stderr=myerr;
loglater.js:
const fs = require('fs');
function LogLater(filename, noduplicates, interval) {
this.filename = filename || "loglater.txt";
this.arr = [];
this.timeout = false;
this.interval = interval || 1000;
this.noduplicates = (noduplicates !== undefined) ? noduplicates : true;
this.onsavetimeout_bind = this.onsavetimeout.bind(this);
this.lasttext = "";
process.on('exit',()=>{ if(this.timeout)clearTimeout(this.timeout);this.timeout=false; this.save(); })
}
LogLater.prototype = {
_log: function _log(text) {
this.arr.push(text);
if (!this.timeout) this.timeout = setTimeout(this.onsavetimeout_bind, this.interval);
},
text: function log(text, loglastline) {
if (this.noduplicates) {
if (this.lasttext === text) return;
this.lasttext = text;
}
this._log(text);
},
line: function log(text, loglastline) {
if (this.noduplicates) {
if (this.lasttext === text) return;
this.lasttext = text;
}
this._log(text + '\r\n');
},
dateline: function dateline(text) {
if (this.noduplicates) {
if (this.lasttext === text) return;
this.lasttext = text;
}
this._log(((new Date()).toISOString()) + '\t' + text + '\r\n');
},
onsavetimeout: function onsavetimeout() {
this.timeout = false;
this.save();
},
save: function save() { fs.appendFile(this.filename, this.arr.splice(0, this.arr.length).join(''), function(err) { if (err) console.log(err.stack) }); }
}
module.exports = LogLater;
I just built a package to do this, hope you like it ;)
https://www.npmjs.com/package/writelog
I myself simply took the example from winston and added a log(...) method (because winston names it info(...)):
Console.js:
"use strict"
// Include the logger module
const winston = require('winston');
const logger = winston.createLogger({
level: 'info',
format: winston.format.json(),
transports: [
//
// - Write to all logs with level `info` and below to `combined.log`
// - Write all logs error (and below) to `error.log`.
//
new winston.transports.File({ filename: 'error.log', level: 'error' }),
new winston.transports.File({ filename: 'combined.log' })
]
});
//
// If we're not in production then log to the `console` with the format:
// `${info.level}: ${info.message} JSON.stringify({ ...rest }) `
//
if (process.env.NODE_ENV !== 'production') {
logger.add(new winston.transports.Console({
format: winston.format.simple()
}));
}
// Add log command
logger.log=logger.info;
module.exports = logger;
Then simply use in your code:
const console = require('./Console')
Now you can simply use the normal log functions in your file and it will write to a file AND log it to your console (while debugging/developing). The console transport is only added because of the if (process.env.NODE_ENV !== 'production') check; remove that check in case you want console output in production as well.
Create a utils/logger.js file with:
var fs = require('fs');
var util = require('util');
var log_file = fs.createWriteStream(__dirname + '/../logs/server.log', { flags: 'w' });
var log_stdout = process.stdout;
console.log = function () {
[...arguments].forEach(element => {
log_file.write(util.format(element) + '\n');
log_stdout.write(util.format(element) + '\n');
});
};
module.exports = {
console
}
Include the logger.js file from any file where you want to console.log like:
const console = require('./utils/logger').console;
Create a logs folder and create an empty server.log file in it and run your app :)
Rudy Huynh's solution worked really well for me. I added a little bit to have it spit out files with today's date and time.
var dateNow = new Date();
var timeNow = dateNow.getHours() + '-' + dateNow.getMinutes();
var logPath = "log/" + dateNow.toDateString() + ' -' + ' Start Time - ' + timeNow + ".log"
consoleLogToFile({
logFilePath: logPath
});
It's not very elegant but this way it'll save different, easy to read, log files instead of just updating the same "default.log" file.
Based on the multi-parameter version by Clément, just without the color codes in the text file:
var fs = require('fs');
var util = require('util');
var logFile = fs.createWriteStream('log.txt', { flags: 'a' });
// Or 'w' to truncate the file every time the process starts.
var logStdout = process.stdout;
console.log = function () {
// Storing without color codes
logFile.write(util.format.apply(null, arguments).replace(/\x1b\[[0-9;]*m/g, "") + '\n');
// Display normally, with colors to Stdout
logStdout.write(util.format.apply(null, arguments) + '\n');
}
Note: Answering since I couldn't comment
I have an issue using mongoose.
The application I am writing consists in a file watcher that notifies clients about certain events via email and socketio messages.
I made an example that shows the problem:
basically, there is a class called mainFileWatcher that contains a submodule which in turn watches for new files or folders created in the script directory, emitting an "event" event when that happens. The mainFileWatcher listens for that event and calls a static method of a mongoose Client model.
If you run the script setting REPL=true you'll be able to access a watcher.submodule object and manually emit an "event" event.
Now, if you manually trigger the event, you'll see a statement saying that the "event" event was triggered, and an email address as a response. Otherwise, if you create a file or a folder in the script folder, you'll just see that statement. Actually, if you run the script with REPL=true you'll get the email only after pressing any key, and nothing otherwise. The fact that you don't get the email address as a response means to me that the code in the promise in the mongoose model doesn't get called for some reason.
Here is the code, sorry I couldn't make it shorter
// run
// npm install mongoose bluebird inotify underscore
//
// If you run the script with REPL=true you get an interactive version
// that has as context the filewatcher that emit events in my original code.
// It can be acessed through the watcher.submodule object.
// The watcher triggers a "event" event when you create a file or a folder in the script
// directory.
//
// If you emit manually an "event" event with the watcher.submodule in the repl, you should see a
// statement that "event" was triggered and
// an email address (that belongs to a fake client created by the bootstrap
// function at startup).
// If instead you create a file or a folder in the script folder (or whatever folder you have setted),
// you should see this time you'll have no email response. Actually, if you run with REPL=true,
// you'll have no email response untill you press any key. If you run without REPL, you'll
// have no email response.
'use strict';
var mongoose = require('mongoose');
//mongoose.set('debug', true);
mongoose.Promise = require('bluebird');
var Schema = mongoose.Schema;
var EventEmitter = require('events');
var util = require('util');
var _ = require("underscore");
var Inotify = require('inotify').Inotify;
var inotify = new Inotify();
// Schema declaration
var clientSchema = new Schema({
emails: {
type: [String]
}
}, {strict:false});
clientSchema.statics.findMailSubscriptions = function (subscription, cb) {
this.find({
subscriptions: {$in: [subscription]},
mailNotifications: true
}).exec().then(function (clients) {
if(!clients || clients.length === 0) return cb(null, null);
var emails = [];
clients.forEach(function (client) {
Array.prototype.push.apply(emails, client.emails)
});
return cb(null, emails);
}).catch(function(err){
console.error(err);
})
};
var clientModel = mongoose.model('Client', clientSchema);
// Mongoose connection
mongoose.connect('mongodb://localhost:27017/test', function (err, db) {
if (err) console.error('Mongoose connect error: ', err);
});
mongoose.connection.on('connected', function () {
console.log('Mongoose connected');
});
// bootstrap function: it inserts a fake client in the database
function bootstrap() {
clientModel.findOne({c_id: "testClient"}).then(function (c) {
if(!c) {
var new_c = new clientModel({
"name": "Notifier",
"c_id": "testClient",
"subscriptions": ["orders"],
"registeredTo": "Daniele",
"emails": ["email#example.com"],
"mailNotifications": true
});
new_c.save(function(err){
if (err) console.error('Bootstrap Client Error while saving: ', err.message );
});
}
});
}
// submodule of the main file watcher: it looks for new files created in the script dir
var submoduleOfFileWatcher = function() {
var _this = this;
var handler = function (event) {
var mask = event.mask;
var type = mask & Inotify.IN_ISDIR ? 'directory' : 'file';
if (mask & Inotify.IN_CREATE) {
_this.emit("event", event);
}
}
var watcher = {
path: '.', // here you can change the path to watch if you want
watch_for: Inotify.IN_CREATE,
callback: handler
}
EventEmitter.call(this);
this.in_id = inotify.addWatch(watcher);
}
util.inherits(submoduleOfFileWatcher, EventEmitter);
// Main File Watcher (it contains all the submodules and listensto the events emitted by them)
var mainFileWatcher = function () {
this.start = function() {
bootstrap();
_.bindAll(this, "onEvent");
this.submodule = new submoduleOfFileWatcher();
this.submodule.on("event", this.onEvent)
};
this.onEvent = function() {
console.log("event triggered");
clientModel.findMailSubscriptions("orders", function(err, mails) {
if (err) console.error(err);
console.log(mails); // THIS IS THE CODE THAT OUTPUTS ONLY IF YOU TRIGGER THE "event" EVENT manually
})
}
}
// Instantiate and start the watcher
var watcher = new mainFileWatcher()
watcher.start();
// start the repl if asked
if (process.env.REPL === "true") {
var repl = require('repl');
var replServer = repl.start({
prompt: 'filewatcher via stdin> ',
input: process.stdin,
output: process.stdout
});
replServer.context.watcher = watcher;
}
Just copy and paste the code, install deps and run it.
Things I tried:
I changed the mongoose Promise object to use bluebird promises, hoping that I could intercept some exception.
I browsed Mongoose calls with node-inspector and indeed the find method gets called and it seems that it throws no exceptions. I really can't figure out what's happening because I don't get any errors at all.
It is not the database connection (I tried to open one just before the findMailSubscriptions call and got an exception for trying to open an already opened connection).
I figure it might be some issues with scopes or promises.
Is there something I am missing about mongoose, or is it just my code that causes this behaviour?
In the MyModule folder, I have these two JS files.
SayHello.js
module.exports.SayHello = function() {
return('Hello !');
}
SayByeBye.js
module.exports.SayByeBye = function() {
return('Bye Bye!');
}
In Node.js, I want to require all files in the MyModule folder and call the functions SayHello & SayByeBye directly, something like:
require('./MyModule')
console.log(SayHello());
console.log(SayByeBye());
EDIT:
With the answer of @Yanick Rochon, I do this:
> ./app/my-module/index.js
global.SayHello = require('./my-module/SayHello').SayHello;
global.SayByeBye = require('./my-module/SayByeBye').SayByeBye;
> ./app/my-module/say-hello.js
module.exports.SayHello = function() {
return('Hello !');
};
> ./app/my-module/say-byebye.js
module.exports.SayByeBye = function() {
return('Bye Bye !');
};
> ./app/main.js
require('./my-module');
console.log(SayHello());
console.log(SayByeBye());
There's a section about global objects in the node documentation.
However, globals should be used with care. By adding modules to the global space I reduce testability and encapsulation. But in this case, I think using this method is acceptable.
First things first...
I believe you are mistaking Node.js for PHP or .NET, in the sense that you don't "import" into the current module what is exported in other modules. Not unless you do it manually, anyway. For example, when you call
require('./my-module');
(Note that I renamed your MyModule to follow the Node.js naming convention.)
You don't load things into the current context; you just load the script and don't assign it to anything. To access what my-module exposes, you need to assign it, or use it directly. For example :
require('./my-module').someFunction();
or
var myModule = require('./my-module');
myModule.someFunction();
Modules are not namespaces, but JavaScript objects that exposes public properties outside of their own contexts (i.e. using module.exports = ...)
Answer
You have two most popular ways to accomplish this :
Solution 1
Create an index.json file inside the folder from which you want to load all of your scripts. The returned JSON array should list all the modules to load automatically:
> ./app/index.json
[
"say-hello.js",
"say-goodbye.js"
]
You should also consider having all your files API compatible :
> ./app/say-hello.js
module.exports = function sayHello() {
return 'Hello !';
};
> ./app/say-goodbye.js
module.exports.sayGoodbye = function () {
return 'Goodbye !';
};
Then load and execute everything like this :
var path = require('path');
var basePath = './app/';
var files = require(basePath);
var mods = files.reduce(function (loaded, file) {
  var mod = require(path.join(basePath, file));
  // mod is a function with a name, so use it!
  if (mod instanceof Function) {
    loaded[mod.name] = mod;
  } else {
    Object.keys(mod).forEach(function (property) {
      loaded[property] = mod[property];
    });
  }
  return loaded;
}, {});
mods.sayHello();
mods.sayGoodbye();
Solution 2
Read all .js files inside your folder and import them. I highly recommend you use glob for this.
var glob = require("glob")
var path = require('path');
var basePath = './app/';
var mods = glob.sync(path.join(basePath, '*.js')).reduce(function (loaded, file) {
var mod = require(file);
// mod is a function with a name, so use it!
if (mod instanceof Function) {
loaded[mod.name] = mod;
} else {
Object.keys(mod).forEach(function (property) {
loaded[property] = mod.property;
});
}
return loaded;
}, {});
mods.sayHello();
mods.sayGoodbye();
Note on the difference between module.exports and exports
Typically module.exports === exports, but it is recommended to use module.exports for the following reason
exports = function Foo() { } // will not do anything
module.exports = function Foo() { } // but this will do what you expect
// however these two lines produce the same result
exports.foo = 'Bar';
module.exports.foo = 'Bar';
For this reason, module.exports is recommended in all cases.
It's not perfect, but something like this should help you accomplish this:
var fs = require('fs');
var path = require('path');
var files = fs.readdirSync(__dirname);
var ownFilename = __filename.substr(__filename.lastIndexOf(path.sep) + 1);
var modules = {};
for (var i = 0; i < files.length; i++) {
var filename = files[i];
if (filename.substr(-3) === '.js' && filename !== ownFilename) {
modules[filename.slice(0, -3)] = require('./' + filename);
}
}
console.log(modules.SayByeBye());
console.log(modules.SayHello());
I've created an application which handles some basic actions like save (e.g. saving a file in the file system), edit, etc. Now I want some users to have the ability to extend this functionality with new actions. For example, a user will clone the application, add an additional file with new actions (and some callbacks), and then register them on some event; I should run these new actions under the hood. My question is: how can I take these new actions from the new file and run them in my process? A simple example would be very helpful.
UPDATE
Let's assume that this is my file that handles the actions, and the user wants to add an additional action like delete:
var fs = require('fs');
module.exports = {
fileAction: function (req, res, filePath) {
var urlAction = urlPath.substr(urlPath.lastIndexOf("/") + 1); // urlPath is assumed to be the request's URL path (e.g. taken from req.url)
if (urlAction === 'save') {
this.save(req,res,filePath);
} else
this.delete(req,res,filePath);
},
save: function (req,res,filePath) {
var writeStream = fs.createWriteStream(filePath, {flags: 'w'});
req.pipe(writeStream);
res.writeHead(200, { 'Content-Type': 'text/plain' });
},
delete: function (req,res,filePath) {
},
}
and the delete code should be something like this:
filePath = 'C://test.txt';
fs.unlinkSync(filePath);
Now, as lordvlad suggests, the user should have a new file with his specific implementation, which should be used following lordvlad's suggested design flow. My question is how to add this delete functionality (which is very simple) and make it work, like some POC for this.
I could imagine some plugin system like so:
main.js
// some setup
var EE = require('events').EventEmitter;
var glob = require('glob');
var path = require('path');
var eventBus = new EE();
// find plugins
glob("plugins/*.js", function(err, files) {
if (err) {
// some error handling here
}
files.forEach(function(file) {
var plugin = require(path.resolve(file)); // resolve to an absolute path for require
plugin(eventBus);
});
});
plugins/my-plugin.js
module.exports = function(eventBus) {
// do something interesting
// listen for events on the event bus
eventBus.on("foo", function(e){
// do something
});
eventBus.emit("pluginReady", "my-plugin");
};
Of course you could substitute the event emitter with some global object, or make the plugin handle callbacks by passing a callback instead of an event bus (see the sketch right after this paragraph). I think the key aspect is to load plugins (done within the glob ... require block) and to make them fit into your system (which you will need to figure out yourself, or provide some code samples of what you already have so somebody can give you another hint).
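For example, a callback-based variant might look roughly like this (the registerAction name is made up for illustration):
// main.js - load plugins and let them register named actions via a callback
var glob = require('glob');
var path = require('path');

var actions = {};
function registerAction(name, fn) {
  actions[name] = fn;
}

glob("plugins/*.js", function (err, files) {
  if (err) throw err;
  files.forEach(function (file) {
    require(path.resolve(file))(registerAction);
  });
  // actions.delete, actions.save, ... are now available here
});

// plugins/my-plugin.js
// module.exports = function (register) {
//   register("delete", function (req, res, filePath) { /* ... */ });
// };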
UPDATE after OP's update
main.js
var glob = require('glob');
var path = require('path');
module.exports = {
save: function(..){..},
load: function(..){..},
}
// typeof module.exports.delete === 'undefined',
// so you cannot call module.exports.delete or similar yet
glob("plugins/*.js", function(err, files) {
files.forEach(function(file){
var plugin = require(path.resolve(file)); // resolve to an absolute path for require
// call plugin initializer if available
if (typeof plugin.init === "function")
plugin.init();
Object.assign(module.exports, plugin); // merge the plugin's methods in (xtend would return a new object instead of mutating module.exports)
});
// here all plugins are loaded and can be used
// i.e. you can do the following
module.exports.delete(...);
});
plugins/my-plugin.js
module.exports = {
delete: function(..){..},
init: function() {
// do something when the plugin loads
}
}
Mind though, that any plugin could overwrite another plugin's methods.