How to create a Node.js module from an asynchronous function response?

This problem concerns the creation of a Node module that depends on an async function to return its content.
GOAL
Module A, implemented in "src/index", must already be resolved when required and must not depend on promises or anything else... It should just return a JSON object of computed values:
var a = require("./src/index");
// should be resolved already.
console.log(a.APP_NAME)
src/index.js
"use strict";
var CoreClass = require("./core-class");
var coreInstance = new CoreClass();
coreInstance.toJson(function(err, coreData) {
if (err) {
console.log("Error while loading " + __filename);
console.log(err);
return;
}
console.log(coreData);
// Export the data from the core.
module.exports = coreData;
});
src/core-class.js
The implementation of the "toJson()" method, defined in the class in "src/core-class.js", is as follows:
/**
 * @return {string} Overriding toJson to return the object properties.
 */
ISPCore.prototype.toJson = function toJson(callback) {
  var result = {
    // From package.json
    APP_NAME: this.appPackageJson.name.trim(),
    APP_VERSION: this.appPackageJson.version.trim(),
    APP_CONFIG_DIR: this.APP_DIR + "/config",
    APP_DOCS_DIR: this.APP_DIR + "/docs",
    APP_TESTS_DIR: this.APP_DIR + "/tests",
  };
  // TODO: Remove this when we have a registry
  if (!this.pom) {
    // Let's verify if there's a pom.xml file in the root APP_DIR
    var _this = this;
    this.APP_POM_PATH = this.APP_DIR + "/pom.xml";
    // Check first to see if the file exists
    fs.stat(this.APP_POM_PATH, function(err, fileStats) {
      // The file does not exist, so we can continue with the current result.
      if (err) {
        return callback(null, result);
      }
      _this._loadPomXmlSettings(function pomXmlCallback(err, pomObject) {
        if (err) {
          return callback(err);
        }
        _this.pom = pomObject;
        // Update the result with the pom information
        result.POM_GROUPID = _this.pom.groupid || "undefined";
        result.POM_ARTIFACTID = _this.pom.artifactid || "undefined";
        result.POM_VERSION = _this.pom.version || "undefined";
        // Callback with the updated version.
        return callback(null, result);
      });
    });
  } else {
    result.POM_GROUPID = this.pom.groupid || "undefined";
    result.POM_ARTIFACTID = this.pom.artifactId || "undefined";
    result.POM_VERSION = this.pom.version || "undefined";
    // Return just what's been collected so far, including the pom.
    return callback(null, result);
  }
};
Test class
Requiring this and trying to use the library just returns an empty object. Here's the test class...
// describing the method to get the instance.
describe("require(sp-core) with pom.xml", function() {
  var core = null;
  before(function(done) {
    // Copy the fixture pom.xml to the APP_DIR
    fs.writeFileSync(__dirname + "/../pom.xml", fs.readFileSync(__dirname + "/fixture/pom.xml"));
    // Load the library after the creation of the pom
    core = require("../src/");
    console.log("TEST AFTER CORE");
    console.log(core);
    done();
  });
  after(function(done) {
    // Delete the pom.xml from the path
    fs.unlinkSync(__dirname + "/../pom.xml");
    done();
  });
  it("should load the properties with pom properties", function(done) {
    expect(core).to.be.an("object");
    console.log("Loaded pom.xml metadata");
    console.log(core);
    expect(core.POM_ARTIFACTID).to.exist;
    expect(core.POM_VERSION).to.exist;
    done();
  });
});
Execution of the tests
However, after a while, the output from the library shows up in the console.
SPCore with pom.xml
require(sp-core) with pom.xml
TEST AFTER CORE
{}
Loaded pom.xml metadata
{}
1) should load the properties with pom properties
{ APP_NAME: 'sp-core',
APP_VERSION: '0.3.5',
ENV: 'development',
NODE_ENV: 'development',
IS_PROD: false,
APP_DIR: '/home/mdesales/dev/isp/sp-core',
APP_CONFIG_DIR: '/home/mdesales/dev/isp/sp-core/config',
APP_DOCS_DIR: '/home/mdesales/dev/isp/sp-core/docs',
APP_TESTS_DIR: '/home/mdesales/dev/isp/sp-core/tests',
POM_GROUPID: 'com.mycompany',
POM_ARTIFACTID: 'my-service',
POM_VERSION: '1.0.15-SNAPSHOT' }
0 passing (142ms)
1 failing
1) SPCore with pom.xml require(sp-core) with pom.xml should load the properties with pom properties:
AssertionError: expected undefined to exist
How do I properly create a module that depends on an async call?
I'm sure this is due to the asynchronous call, but I expected the module not to return {}, and instead wait until the callback returned.
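For context: require() returns whatever module.exports holds at the moment the module body finishes executing; reassigning module.exports later, inside an async callback, does not update references that were already handed out. A hypothetical two-file sketch of the timing problem:
// late-export.js -- reassigns module.exports asynchronously
setTimeout(function () {
  module.exports = { ready: true };
}, 100);

// main.js
var late = require('./late-export');
console.log(late); // {} -- require() returned before the timer fired
setTimeout(function () {
  console.log(late); // still {} -- `late` still points at the original exports object
}, 200);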
I tried using:
Async.waterfall
Deasync (does not work)
Async.waterfall attempt
"use strict";
var async = require("async");
var CoreClass = require("./core-class");
var coreInstance = new CoreClass();
async.waterfall([
function(cb) {
coreInstance.toJson(cb);
},
function(coreData) {
console.log(coreData);
module.exports = coreData;
}
]);
Please please help!

Following the comments, I revisited the attempt to use the "deasync" module, and it WORKS! YES WE CAN! Cheating with the "deasync" hack :D
Runnable instance
The runnable solution is at http://code.runnable.com/VbCksvKBUC4xu3rd/demo-that-an-async-method-can-be-returned-before-a-module-exports-is-resolved-for-node-js-deasync-pom-parser-and-stackoverflow-31577688
Type "npm test" in the terminal box and hit "ENTER" (always works).
Just click in the "Run" button to see the execution of the code. All the source code is available. (Sometimes the container gets corrupted and the test fails).
Solution
Here's the implementation of the "GOAL" module.
/** @module Default Settings */
"use strict";
var CoreClass = require("./core-class");
var merge = require("merge");
var deasync = require("deasync");

// Core properties needed.
var coreInstance = new CoreClass();
var coreProperties = coreInstance.toJson();

// Pom properties temporary support: deasync the async call.
var loadPom = deasync(coreInstance.loadPomXmlSettings);
var pomObject = loadPom(coreProperties.APP_POM_PATH);

// Merge them all.
var allProperties = merge(coreProperties, pomObject);
module.exports = allProperties;
With that, all the properties are resolved synchronously and module.exports contains the expected object!
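For reference, the non-hack alternative is to export the pending work itself, e.g. a promise, and let consumers wait on it. This gives up the stated GOAL of a plain, already-resolved object (which is what deasync buys you), but avoids blocking the event loop. A minimal sketch, assuming the same CoreClass as above:
// src/index.js -- exports a promise instead of a plain object (sketch)
"use strict";
var CoreClass = require("./core-class");
var coreInstance = new CoreClass();

module.exports = new Promise(function (resolve, reject) {
  coreInstance.toJson(function (err, coreData) {
    if (err) return reject(err);
    resolve(coreData);
  });
});

// consumer:
require("./src").then(function (a) {
  console.log(a.APP_NAME);
});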

How can I change the output of the console in javascript to a file? [duplicate]

Can I configure console.log so that logs are written to a file instead of being printed to the console?
You could also just overload the default console.log function:
var fs = require('fs');
var util = require('util');

var log_file = fs.createWriteStream(__dirname + '/debug.log', {flags : 'w'});
var log_stdout = process.stdout;

console.log = function(d) {
  log_file.write(util.format(d) + '\n');
  log_stdout.write(util.format(d) + '\n');
};
The above example will log to debug.log and stdout.
Edit: See the multi-parameter version by Clément, also on this page.
Update 2013 - This was written around Node v0.2 and v0.4; there are much better utilities around logging now. I highly recommend Winston.
Update late 2013 - We still use Winston, but now with a logger library to wrap the functionality around logging of custom objects and formatting. Here is a sample of our logger.js: https://gist.github.com/rtgibbons/7354879
Should be as simple as this:
var access = fs.createWriteStream(dir + '/node.access.log', { flags: 'a' });
var error = fs.createWriteStream(dir + '/node.error.log', { flags: 'a' });

// redirect stdout / stderr (`proc` is assumed to be a spawned child process)
proc.stdout.pipe(access);
proc.stderr.pipe(error);
If you are looking for something in production, winston is probably the best choice.
If you just want to do dev stuff quickly, output directly to a file (I think this works only for *nix systems):
nohup node simple-server.js > output.log &
I often use many arguments to console.log() and console.error(), so my solution would be:
var fs = require('fs');
var util = require('util');
var logFile = fs.createWriteStream('log.txt', { flags: 'a' });
// Or 'w' to truncate the file every time the process starts.
var logStdout = process.stdout;
console.log = function () {
  logFile.write(util.format.apply(null, arguments) + '\n');
  logStdout.write(util.format.apply(null, arguments) + '\n');
};
console.error = console.log;
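Usage is unchanged, and printf-style formatting with extra arguments now survives, for example:
console.log('user %s logged in, attempt %d', 'alice', 3);
// -> appended to log.txt and echoed to stdout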
Winston is a very popular npm module used for logging.
Here is a how-to.
Install winston in your project as:
npm install winston --save
Here's a configuration, ready to use out of the box, that I use frequently in my projects as logger.js under utils.
/**
 * Configurations of logger.
 */
const winston = require('winston');
const winstonRotator = require('winston-daily-rotate-file');

const consoleConfig = [
  new winston.transports.Console({
    'colorize': true
  })
];

const createLogger = new winston.Logger({
  'transports': consoleConfig
});

const successLogger = createLogger;
successLogger.add(winstonRotator, {
  'name': 'access-file',
  'level': 'info',
  'filename': './logs/access.log',
  'json': false,
  'datePattern': 'yyyy-MM-dd-',
  'prepend': true
});

const errorLogger = createLogger;
errorLogger.add(winstonRotator, {
  'name': 'error-file',
  'level': 'error',
  'filename': './logs/error.log',
  'json': false,
  'datePattern': 'yyyy-MM-dd-',
  'prepend': true
});

module.exports = {
  'successlog': successLogger,
  'errorlog': errorLogger
};
And then simply import it wherever required, like this:
const errorLog = require('../util/logger').errorlog;
const successlog = require('../util/logger').successlog;
Then you can log successes as:
successlog.info(`Success Message and variables: ${variable}`);
and errors as:
errorlog.error(`Error Message : ${error}`);
It also logs all the success and error logs in files under the logs directory, date-wise.
const fs = require("fs");
const {keys} = Object;
const {Console} = console;
/**
* Redirect console to a file. Call without path or with false-y
* value to restore original behavior.
* #param {string} [path]
*/
function file(path) {
const con = path ? new Console(fs.createWriteStream(path)) : null;
keys(Console.prototype).forEach(key => {
if (path) {
this[key] = (...args) => con[key](...args);
} else {
delete this[key];
}
});
};
// patch global console object and export
module.exports = console.file = file;
To use it, do something like:
require("./console-file");
console.file("/path/to.log");
console.log("write to file!");
console.error("also write to file!");
console.file(); // go back to writing to stdout
For simple cases, we can redirect the standard output (STDOUT) and standard error (STDERR) streams directly to a file (say, test.log) using '>' and '2>&1'.
Example:
// test.js
(function() {
  // Below outputs are sent to the standard output (STDOUT) stream
  console.log("Hello Log");
  console.info("Hello Info");
  // Below outputs are sent to the standard error (STDERR) stream
  console.error("Hello Error");
  console.warn("Hello Warning");
})();
node test.js > test.log 2>&1
As per the POSIX standard, the 'input', 'output' and 'error' streams are identified by the file descriptors 0, 1 and 2: stdin is 0, stdout is 1, and stderr is 2.
Step 1: '2>&1' redirects 2 (stderr) to 1 (stdout)
Step 2: '>' redirects 1 (stdout) to the file (test.log)
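The two streams can also be split instead of combined, keeping regular output separate from errors:
node test.js 1> test.log 2> error.log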
If this is for an application, you're probably better off using a logging module; it'll give you more flexibility. Some suggestions (a minimal log4js sketch follows the list):
winston https://github.com/winstonjs/winston
log4js https://github.com/nomiddlename/log4js-node
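For example, a minimal file-appender setup with log4js (a sketch against the log4js-node v2+ API; check the docs for your installed version):
const log4js = require('log4js');
log4js.configure({
  appenders: { app: { type: 'file', filename: 'app.log' } },
  categories: { default: { appenders: ['app'], level: 'info' } }
});
const logger = log4js.getLogger();
logger.info('this line goes to app.log');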
Straight from Node.js's API docs on Console:
const fs = require('fs');
const { Console } = require('console'); // also available as the global console.Console

const output = fs.createWriteStream('./stdout.log');
const errorOutput = fs.createWriteStream('./stderr.log');
// custom simple logger
const logger = new Console(output, errorOutput);
// use it like console
const count = 5;
logger.log('count: %d', count);
// in stdout.log: count 5
Another solution not mentioned yet is to hook the Writable streams in process.stdout and process.stderr. This way you don't need to override all the console functions that write to stdout and stderr. This implementation redirects both stdout and stderr to a log file:
var log_file = require('fs').createWriteStream(__dirname + '/log.txt', {flags : 'w'});

function hook_stream(stream, callback) {
  var old_write = stream.write;
  stream.write = (function(write) {
    return function(string, encoding, fd) {
      write.apply(stream, arguments); // comment this line out if you don't want output in the console
      callback(string, encoding, fd);
    };
  })(stream.write);
  return function() {
    stream.write = old_write;
  };
}

console.log('a');
console.error('b');

var unhook_stdout = hook_stream(process.stdout, function(string, encoding, fd) {
  log_file.write(string, encoding);
});
var unhook_stderr = hook_stream(process.stderr, function(string, encoding, fd) {
  log_file.write(string, encoding);
});

console.log('c');
console.error('d');

unhook_stdout();
unhook_stderr();

console.log('e');
console.error('f');
It should print in the console
a
b
c
d
e
f
and in the log file:
c
d
For more info, check this gist.
Overwriting console.log is the way to go. But for it to work in required modules, you also need to export it.
module.exports = console;
To save yourself the trouble of writing log files, rotating and stuff, you might consider using a simple logger module like winston:
// Include the logger module
var winston = require('winston');

// Set up the log file. (You can also define size, rotation etc.)
winston.add(winston.transports.File, { filename: 'somefile.log' });

// Overwrite some of the built-in console functions
console.error = winston.error;
console.log = winston.info;
console.info = winston.info;
console.debug = winston.debug;
console.warn = winston.warn;
module.exports = console;
If you're using Linux, you can also use output redirection. (Not sure about Windows.)
node server.js >> file.log 2>> file.log
>> file.log redirects stdout to the file
2>> file.log redirects stderr to the file
Others use the shorthand &>> for both stdout and stderr, but it's not accepted by either my Mac or Ubuntu :(
Extra: > overwrites, while >> appends.
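A portable equivalent that both bash and dash accept is to append stdout and point stderr at it:
node server.js >> file.log 2>&1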
By the way, regarding Node.js loggers, I use the pino + pino-pretty logger.
METHOD STDOUT AND STDERR
This approach can help you (I use something similar in my projects) and it works for all methods, including console.log, console.warn, console.error, and console.info.
This method writes the bytes that go through stdout and stderr to a file. It is better than changing the console.log, console.warn, console.error and console.info methods, because the output will be exactly the same as those methods' output.
var fs= require("fs")
var os= require("os")
var HOME= os.homedir()
var stdout_r = fs.createWriteStream(HOME + '/node.stdout.log', { flags: 'a' })
var stderr_r = fs.createWriteStream(HOME + '/node.stderr.log', { flags: 'a' })
var attachToLog= function(std, std_new){
var originalwrite= std.write
std.write= function(data,enc){
try{
var d= data
if(!Buffer.isBuffer(d))
d= Buffer.from(data, (typeof enc === 'string') ? enc : "utf8")
std_new.write.apply(std_new, d)
}catch(e){}
return originalwrite.apply(std, arguments)
}
}
attachToLog(process.stdout, stdout_r)
attachToLog(process.stderr, stderr_r)
// recommended catch error on stdout_r and stderr_r
// stdout_r.on("error", yourfunction)
// stderr_r.on("error", yourfunction)
Adding to the answer above, a little bit of an expansion to the short and efficient code overriding console.log. Minor additions: set the filename with a date, a wrapper function, and also do the original console.logging to keep the console active with the info.
Usage: at the beginning of your code, run setConsoleLogToFile([FILENAME]).
const fs = require("fs"),
util = require('util');
const getPrettyDate = ()=> new Date().toString().replace(":","-").replace(/00\s\(.*\)/, "").replace(` ${new Date().getFullYear()}`, ",").replace(/:\d\d\s/, " ");
module.exports.getPrettyDate = getPrettyDate;
module.exports.setConsoleLogToFile = (filename) => {
const log_file = fs.createWriteStream(`${__dirname}/${filename} - ${getPrettyDate()}.log`, { flags: 'w' }),
log_stdout = process.stdout;
const origConsole = console.log;
console.log = (d) => {
origConsole(d);
log_file.write(util.format(d) + '\n');
log_stdout.write(util.format(d) + '\n');
};
}
Most loggers are overkill and don't support the built-in console.log correctly. Hence I created console-log-to-file:
import { consoleLogToFile } from "console-log-to-file";
// or `const { consoleLogToFile } = require("console-log-to-file/dist/index.cjs.js")`
consoleLogToFile({
  logFilePath: "/log/default.log",
});
// All of your console.log/warn/error/info calls will keep working as they do now, and also be saved to the file.
If you are looking for a solution without modifying any code, here is a simple one.
It requires pm2; just add it to your node modules and start your app with
pm2 start server.js
And you are done; console.logs are now automatically registered under $HOME/.pm2/logs/server-out.log.
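pm2 also ships log-management commands, e.g.:
pm2 logs server   # tail the out and error logs of the "server" app
pm2 flush         # empty all log files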
Improving on Andres Riofrio's answer, to handle any number of arguments:
var fs = require('fs');
var util = require('util');
var log_file = fs.createWriteStream(__dirname + '/debug.log', {flags : 'w'});
var log_stdout = process.stdout;
console.log = function(...args) {
  var output = args.join(' ');
  log_file.write(util.format(output) + '\r\n');
  log_stdout.write(util.format(output) + '\r\n');
};
You can now use Caterpillar, which is a streams-based logging system, allowing you to log to it and then pipe the output off to different transforms and locations.
Outputting to a file is as easy as:
var logger = new (require('caterpillar').Logger)(); // require('caterpillar') rather than require('./'), which only works inside the library's own repo
logger.pipe(require('fs').createWriteStream('./debug.log'));
logger.log('your log message');
Complete example on the Caterpillar Website
For future users: @keshavDulal's answer doesn't work for the latest version, and I couldn't find a proper fix for the issues reported in the latest version 3.3.3.
Anyway, I finally fixed it after researching a bit. Here is the solution for winston version 3.3.3.
Install winston and winston-daily-rotate-file
npm install winston
npm install winston-daily-rotate-file
Create a new file utils/logger.js
const winston = require('winston');
const winstonRotator = require('winston-daily-rotate-file'); // registers the DailyRotateFile transport

var logger = winston.createLogger({ // createLogger is a factory; no `new` needed
  transports: [
    new (winston.transports.DailyRotateFile)({
      name: 'access-file',
      level: 'info',
      filename: './logs/access.log',
      json: false,
      datePattern: 'yyyy-MM-DD',
      prepend: true,
      maxFiles: 10
    }),
    new (winston.transports.DailyRotateFile)({
      name: 'error-file',
      level: 'error',
      filename: './logs/error.log',
      json: false,
      datePattern: 'yyyy-MM-DD',
      prepend: true,
      maxFiles: 10
    })
  ]
});

module.exports = {
  logger
};
Then, in any file where you want to use logging, import the module like:
const logger = require('./utils/logger').logger;
Use the logger like the following:
logger.info('Info service started');
logger.error('Service crashed');
If you are using forever to keep your node app running, then typing forever list will show you the path to the log file that console.log is writing to.
You can use the Node.js Console constructor:
const fs = require("fs");

const mylog = new console.Console(
  fs.createWriteStream("log/logger.log"),
  fs.createWriteStream("log/error.log")
);
And then you can use it just like the normal console class, e.g.:
mylog.log("Ok!"); // Will be written into 'log/logger.log'
mylog.error("Bad!"); // Will be written into 'log/error.log'
You can also have a look at this npm module:
https://www.npmjs.com/package/noogger
noogger
Simple and straightforward...
I took on the idea of swapping the output stream for my own stream.
const LogLater = require('./loglater.js');

var logfile = new LogLater('log' + (new Date().toISOString().replace(/[^a-zA-Z0-9]/g, '-')) + '.txt');

var PassThrough = require('stream').PassThrough;

var myout = new PassThrough();
var wasout = console._stdout;
myout.on('data', (data) => { logfile.dateline("\r\n" + data); wasout.write(data); });
console._stdout = myout;

var myerr = new PassThrough();
var waserr = console._stderr;
myerr.on('data', (data) => { logfile.dateline("\r\n" + data); waserr.write(data); });
console._stderr = myerr;
loglater.js:
const fs = require('fs');

function LogLater(filename, noduplicates, interval) {
  this.filename = filename || "loglater.txt";
  this.arr = [];
  this.timeout = false;
  this.interval = interval || 1000;
  this.noduplicates = (noduplicates === undefined) ? true : noduplicates; // `noduplicates || true` would always be true
  this.onsavetimeout_bind = this.onsavetimeout.bind(this);
  this.lasttext = "";
  process.on('exit', () => {
    if (this.timeout) clearTimeout(this.timeout);
    this.timeout = false;
    this.save();
  });
}

LogLater.prototype = {
  _log: function _log(text) {
    this.arr.push(text);
    if (!this.timeout) this.timeout = setTimeout(this.onsavetimeout_bind, this.interval);
  },
  text: function log(text, loglastline) {
    if (this.noduplicates) {
      if (this.lasttext === text) return;
      this.lasttext = text; // was `this.lastline`, which never matched the check above
    }
    this._log(text);
  },
  line: function log(text, loglastline) {
    if (this.noduplicates) {
      if (this.lasttext === text) return;
      this.lasttext = text;
    }
    this._log(text + '\r\n');
  },
  dateline: function dateline(text) {
    if (this.noduplicates) {
      if (this.lasttext === text) return;
      this.lasttext = text;
    }
    this._log(((new Date()).toISOString()) + '\t' + text + '\r\n');
  },
  onsavetimeout: function onsavetimeout() {
    this.timeout = false;
    this.save();
  },
  save: function save() {
    fs.appendFile(this.filename, this.arr.splice(0, this.arr.length).join(''), function(err) {
      if (err) console.log(err.stack);
    });
  }
};

module.exports = LogLater;
I just built a package to do this; hope you like it ;)
https://www.npmjs.com/package/writelog
I myself simply took the example from winston and added the log(...) method (because winston names it info(...)):
Console.js:
"use strict"
// Include the logger module
const winston = require('winston');
const logger = winston.createLogger({
level: 'info',
format: winston.format.json(),
transports: [
//
// - Write to all logs with level `info` and below to `combined.log`
// - Write all logs error (and below) to `error.log`.
//
new winston.transports.File({ filename: 'error.log', level: 'error' }),
new winston.transports.File({ filename: 'combined.log' })
]
});
//
// If we're not in production then log to the `console` with the format:
// `${info.level}: ${info.message} JSON.stringify({ ...rest }) `
//
if (process.env.NODE_ENV !== 'production') {
logger.add(new winston.transports.Console({
format: winston.format.simple()
}));
}
// Add log command
logger.log=logger.info;
module.exports = logger;
Then simply use it in your code:
const console = require('./Console')
Now you can simply use the normal log functions in your file and it will create a file AND log to your console (while debugging/developing). Because of the if (process.env.NODE_ENV !== 'production') { guard, nothing extra is logged to the console in production (remove the guard in case you want console output there too)...
Create a utils/logger.js file with:
var fs = require('fs');
var util = require('util');
var log_file = fs.createWriteStream(__dirname + '/../logs/server.log', { flags: 'w' });
var log_stdout = process.stdout;
console.log = function () {
  [...arguments].forEach(element => {
    log_file.write(util.format(element) + '\n');
    log_stdout.write(util.format(element) + '\n');
  });
};

module.exports = {
  console
};
Include the logger.js file from any file where you want to console.log, like:
const console = require('./utils/logger').console;
Create a logs folder, create an empty server.log file in it, and run your app :)
Rudy Huynh's solution worked really well for me. I added a little bit to have it spit out files with today's date and time.
var dateNow = new Date();
var timeNow = dateNow.getHours() + '-' + dateNow.getMinutes();
var logPath = "log/" + dateNow.toDateString() + ' -' + ' Start Time - ' + timeNow + ".log";

consoleLogToFile({
  logFilePath: logPath
});
It's not very elegant, but this way it'll save separate, easy-to-read log files instead of just updating the same "default.log" file.
Based on the multi-parameter version by Clément, just without the color codes in the text file:
var fs = require('fs');
var util = require('util');

var logFile = fs.createWriteStream('log.txt', { flags: 'a' });
// Or 'w' to truncate the file every time the process starts.
var logStdout = process.stdout;

console.log = function () {
  // Store without ANSI color codes
  logFile.write(util.format.apply(null, arguments).replace(/\x1b\[[0-9;]*m/g, "") + '\n');
  // Display normally, with colors, to stdout
  logStdout.write(util.format.apply(null, arguments) + '\n');
};
Note: Answering since I couldn't comment

Mongoose possible issue

I have an issue using mongoose.
The application I am writing consists of a file watcher that notifies clients about certain events via email and socket.io messages.
I made an example that shows the problem:
basically, there is a class called mainFileWatcher that contains a submodule which in turn watches for new files or folders created in the script directory, emitting an "event" event when that happens. The mainFileWatcher listens for that event and calls a static method of a mongoose Client model.
If you run the script setting REPL=true you'll be able to access a watcher.submodule object and manually emit an "event" event.
Now, if you manually trigger the event, you'll see a statement saying that the "event" event was triggered, and an email address as a response.
Otherwise, if you create a file or a folder in the script folder, you'll just see that statement. Actually, if you run the script with REPL=true you'll get the email only after pressing any key, and nothing otherwise.
The fact that you don't get the email address as a response suggests to me that the code in the promise in the mongoose model doesn't get called for some reason.
Here is the code; sorry I couldn't make it shorter.
// run
// npm install mongoose bluebird inotify underscore
//
// If you run the script with REPL=true you get an interactive version
// that has as context the filewatcher that emits events in my original code.
// It can be accessed through the watcher.submodule object.
// The watcher triggers an "event" event when you create a file or a folder in the script
// directory.
//
// If you manually emit an "event" event with watcher.submodule in the repl, you should see a
// statement that "event" was triggered and
// an email address (that belongs to a fake client created by the bootstrap
// function at startup).
// If instead you create a file or a folder in the script folder (or whatever folder you have set),
// this time you'll have no email response. Actually, if you run with REPL=true,
// you'll have no email response until you press any key. If you run without REPL, you'll
// have no email response.
'use strict';
var mongoose = require('mongoose');
//mongoose.set('debug', true);
mongoose.Promise = require('bluebird');
var Schema = mongoose.Schema;
var EventEmitter = require('events');
var util = require('util');
var _ = require("underscore");
var Inotify = require('inotify').Inotify;
var inotify = new Inotify();

// Schema declaration
var clientSchema = new Schema({
  emails: {
    type: [String]
  }
}, {strict: false});

clientSchema.statics.findMailSubscriptions = function (subscription, cb) {
  this.find({
    subscriptions: {$in: [subscription]},
    mailNotifications: true
  }).exec().then(function (clients) {
    if (!clients || clients.length === 0) return cb(null, null);
    var emails = [];
    clients.forEach(function (client) {
      Array.prototype.push.apply(emails, client.emails);
    });
    return cb(null, emails);
  }).catch(function (err) {
    console.error(err);
  });
};

var clientModel = mongoose.model('Client', clientSchema);

// Mongoose connection
mongoose.connect('mongodb://localhost:27017/test', function (err, db) {
  if (err) console.error('Mongoose connect error: ', err);
});
mongoose.connection.on('connected', function () {
  console.log('Mongoose connected');
});

// bootstrap function: it inserts a fake client in the database
function bootstrap() {
  clientModel.findOne({c_id: "testClient"}).then(function (c) {
    if (!c) {
      var new_c = new clientModel({
        "name": "Notifier",
        "c_id": "testClient",
        "subscriptions": ["orders"],
        "registeredTo": "Daniele",
        "emails": ["email@example.com"],
        "mailNotifications": true
      });
      new_c.save(function (err) {
        if (err) console.error('Bootstrap Client Error while saving: ', err.message);
      });
    }
  });
}

// submodule of the main file watcher: it looks for new files created in the script dir
var submoduleOfFileWatcher = function () {
  var _this = this;
  var handler = function (event) {
    var mask = event.mask;
    var type = mask & Inotify.IN_ISDIR ? 'directory' : 'file';
    if (mask & Inotify.IN_CREATE) {
      _this.emit("event", event);
    }
  };
  var watcher = {
    path: '.', // here you can change the path to watch if you want
    watch_for: Inotify.IN_CREATE,
    callback: handler
  };
  EventEmitter.call(this);
  this.in_id = inotify.addWatch(watcher);
};
util.inherits(submoduleOfFileWatcher, EventEmitter);

// Main file watcher (it contains all the submodules and listens to the events emitted by them)
var mainFileWatcher = function () {
  this.start = function () {
    bootstrap();
    _.bindAll(this, "onEvent");
    this.submodule = new submoduleOfFileWatcher();
    this.submodule.on("event", this.onEvent);
  };
  this.onEvent = function () {
    console.log("event triggered");
    clientModel.findMailSubscriptions("orders", function (err, mails) {
      if (err) console.error(err);
      console.log(mails); // THIS IS THE CODE THAT OUTPUTS ONLY IF YOU TRIGGER THE "event" EVENT manually
    });
  };
};

// Instantiate and start the watcher
var watcher = new mainFileWatcher();
watcher.start();

// start the repl if asked
if (process.env.REPL === "true") {
  var repl = require('repl');
  var replServer = repl.start({
    prompt: 'filewatcher via stdin> ',
    input: process.stdin,
    output: process.stdout
  });
  replServer.context.watcher = watcher;
}
Just copy and paste the code, install the deps, and run it.
Things I tried:
I changed the mongoose Promise object to use bluebird promises, hoping that I could intercept some exception.
I browsed the Mongoose calls with node-inspector, and indeed the find method gets called and it seems to throw no exceptions. I really can't figure out what's happening because I don't get any errors at all.
It is not the database connection (I tried to open one just before the findMailSubscriptions call and got an exception for trying to open an already opened connection).
I figure it might be some issue with scopes or promises.
Is there something I am missing about mongoose, or is it just my code that causes this behaviour?

Node.js, require all modules in folder and use loaded module directly

In the MyModule folder, I have these two JS files.
SayHello.js
module.exports.SayHello = function() {
  return 'Hello !';
};
SayByeBye.js
module.exports.SayByeBye = function() {
  return 'Bye Bye!';
};
In Node.js, I want to require all files in the MyModule folder and call the functions SayHello & SayByeBye directly, something like:
require('./MyModule');
console.log(SayHello());
console.log(SayByeBye());
EDIT:
With the answer of @Yanick Rochon, I do this:
> ./app/my-module/index.js
global.SayHello = require('./say-hello').SayHello;
global.SayByeBye = require('./say-byebye').SayByeBye;
> ./app/my-module/say-hello.js
module.exports.SayHello = function() {
  return 'Hello !';
};
> ./app/my-module/say-byebye.js
module.exports.SayByeBye = function() {
  return 'Bye Bye !';
};
> ./app/main.js
require('./my-module');
console.log(SayHello());
console.log(SayByeBye());
There's a section about global objects in the node documentation.
However, globals should be used with care. By adding modules to the global space I reduce testability and encapsulation. But in this case, I think using this method is acceptable.
First things first...
I believe you are confusing Node.js with PHP or .NET, in the sense that you don't "import" into the current module what is exported in other modules. Not unless you manually do it, anyway. For example, when you call
require('./my-module');
(Note that I renamed your MyModule to match the Node.js naming convention.)
you don't load things into the current context; you just load the script without assigning it to anything. To access what my-module exposes, you need to assign it, or use it directly. For example:
require('./my-module').someFunction();
or
var myModule = require('./my-module');
myModule.someFunction();
Modules are not namespaces, but JavaScript objects that expose public properties outside of their own contexts (i.e. using module.exports = ...)
Answer
There are two popular ways to accomplish this:
Solution 1
Create an index.json file inside the folder from which you want to load all of your scripts. The returned JSON array should list all the modules to load automatically:
> ./app/index.json
[
  "say-hello.js",
  "say-goodbye.js"
]
You should also consider making all your files API-compatible:
> ./app/say-hello.js
module.exports = function sayHello() {
  return 'Hello !';
};
> ./app/say-goodbye.js
module.exports.sayGoodbye = function () {
  return 'Goodbye !';
};
Then load and execute everything like this:
var path = require('path');

var basePath = './app/';
var files = require(basePath); // loads ./app/index.json, the array above
var mods = files.reduce(function (loaded, file) { // reduce, not forEach: we need the accumulator back
  var mod = require(path.join(basePath, file));
  // mod is a function with a name, so use it!
  if (mod instanceof Function) {
    loaded[mod.name] = mod;
  } else {
    Object.keys(mod).forEach(function (property) {
      loaded[property] = mod[property];
    });
  }
  return loaded;
}, {});

mods.sayHello();
mods.sayGoodbye();
Solution 2
Read all .js files inside your folder and import them. I highly recommend you use glob for this.
var glob = require("glob");
var path = require('path');

var basePath = './app/';
var mods = glob.sync(path.join(basePath, '*.js')).reduce(function (loaded, file) {
  // resolve to an absolute path so require() doesn't mistake it for a package name
  var mod = require(path.resolve(file));
  // mod is a function with a name, so use it!
  if (mod instanceof Function) {
    loaded[mod.name] = mod;
  } else {
    Object.keys(mod).forEach(function (property) {
      loaded[property] = mod[property];
    });
  }
  return loaded;
}, {});

mods.sayHello();
mods.sayGoodbye();
Note on the difference between module.exports and exports
Typically module.exports === exports, but it is recommended to use module.exports, for the following reason:
exports = function Foo() { }        // will not do anything
module.exports = function Foo() { } // but this will do what you expect

// however, these two lines produce the same result:
exports.foo = 'Bar';
module.exports.foo = 'Bar';
For this reason, module.exports is recommended in all cases.
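The underlying reason: Node wraps every module in a function, and exports is just a parameter of that wrapper. A simplified sketch of what Node effectively does:
// Simplified view of Node's module wrapper:
(function (exports, require, module, __filename, __dirname) {
  // your module code runs here
  exports = function Foo() {};        // rebinds only the local parameter
  module.exports = function Foo() {}; // replaces what require() returns
})(module.exports, require, module, 'file.js', '/dir');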
It's not perfect, but something like this should help you accomplish this:
var fs = require('fs');
var path = require('path');

var files = fs.readdirSync(__dirname);
// path.sep is the directory separator ('/'); path.delimiter would be the PATH-list separator (':')
var ownFilename = __filename.substr(__filename.lastIndexOf(path.sep) + 1);
var modules = {};
for (var i = 0; i < files.length; i++) {
  var filename = files[i];
  if (filename.substr(-3) === '.js' && filename !== ownFilename) {
    modules[filename.slice(0, -3)] = require('./' + filename);
  }
}

console.log(modules.SayByeBye());
console.log(modules.SayHello());

Extend node application with new actions

I've created an application which handles some basic actions like save (e.g. saving a file in the file system), edit, etc. Now I want some users to have the ability to extend this functionality with new actions. For example, a user will clone the application, add an additional file with new actions (and some callbacks), and then register them on some event.
I should run these new actions under the hood. My question is: how can I take these new actions from the new file and run them in my process? A simple example would be very helpful.
UPDATE
Let's assume that this is my file that handles the actions, and a user wants to add an additional action like delete:
var fs = require('fs');

module.exports = {
  fileAction: function (req, res, filePath) {
    // was `urlPath`, which is not defined here; presumably the request URL was meant
    var urlAction = req.url.substr(req.url.lastIndexOf("/") + 1);
    if (urlAction === 'save') {
      this.save(req, res, filePath);
    } else
      this.delete(req, res, filePath);
  },
  save: function (req, res, filePath) {
    var writeStream = fs.createWriteStream(filePath, {flags: 'w'});
    req.pipe(writeStream);
    res.writeHead(200, { 'Content-Type': 'text/plain' });
  },
  delete: function (req, res, filePath) {
  },
};
and the delete code should be something like this
filePath = 'C://test.txt';
fs.unlinkSync(filePath);
Now, as lordvlad suggests, the user should have a new file with their specific implementation, which should be used per lordvlad's suggested design flow. My question is how to add this delete functionality (which is very simple) and make it work, like some POC for this.
I could imagine some plugin system like so:
main.js
// some setup
var EE = require('events').EventEmitter;
var glob = require('glob');
var path = require('path');

var eventBus = new EE();

// find plugins
glob("plugins/*.js", function (err, files) {
  if (err) {
    // some error handling here
  }
  files.forEach(function (file) {
    // resolve to an absolute path so require() doesn't mistake it for a package name
    var plugin = require(path.resolve(file));
    plugin(eventBus);
  });
});
plugins/my-plugin.js
module.exports = function (eventBus) {
  // do something interesting
  // listen for events on the event bus
  eventBus.on("foo", function (e) {
    // do something
  });
  eventBus.emit("pluginReady", "my-plugin"); // was `eventEmitter`, which is not defined here
};
Of course you could substitute the event emitter with some global object, or make the plugin handle callbacks by passing a callback instead of an event bus. I think the key aspect is to load the plugins (done within the glob ... require block) and to make them fit into your system (which you will need to figure out yourself, or provide some code samples of what you already have so somebody can give you another hint).
UPDATE after OP's update
main.js
var glob = require('glob');
var path = require('path');
// note: plain `xtend` returns a new object; the mutable variant extends in place,
// which is what we need to add methods onto module.exports
var xtend = require('xtend/mutable');

module.exports = {
  save: function () { /* ... */ },
  load: function () { /* ... */ }
};

// typeof module.exports.delete === 'undefined',
// so you cannot call module.exports.delete or similar yet

glob("plugins/*.js", function (err, files) {
  files.forEach(function (file) {
    var plugin = require(path.resolve(file));
    // call the plugin initializer if available
    if (typeof plugin.init === "function")
      plugin.init();
    xtend(module.exports, plugin);
  });
  // here all plugins are loaded and can be used,
  // i.e. you can do the following:
  module.exports.delete(/* req, res, filePath */);
});
plugins/my-plugin.js
module.exports = {
  delete: function (req, res, filePath) {
    // e.g. fs.unlinkSync(filePath);
  },
  init: function () {
    // do something when the plugin loads
  }
};
Mind though, that any plugin could overwrite another plugin's methods.
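If that's a concern, a small guard before merging can warn about (or refuse) duplicate keys. A sketch that could replace the plain xtend call above (safeExtend is a hypothetical helper, not part of xtend):
// Warn before a plugin clobbers an existing method (sketch)
function safeExtend(target, plugin, pluginName) {
  Object.keys(plugin).forEach(function (key) {
    if (key !== 'init' && typeof target[key] !== 'undefined') {
      console.warn('plugin "' + pluginName + '" overwrites "' + key + '"');
    }
    target[key] = plugin[key];
  });
}

// inside the glob callback, instead of xtend(module.exports, plugin):
// safeExtend(module.exports, plugin, file);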

How to setup yeoman test for subgenerator that reads package.json

I have a subgenerator that uses the name from package.json. Now I want to test that function, so I wrote a before() that is supposed to create a dummy package.json for the test.
The problem is that the subgenerator cannot read the dummy JSON file.
test file:
before(function (done) {
  helpers.run(path.join(__dirname, '../addcomponent'))
    .inDir(path.join(__dirname, './tmp'), function(dir) {
      fs.copyTpl(
        path.join(__dirname, '../app/templates/_package.json'),
        dir + 'package.json',
        { ProjectName: 'foo' }
      );
      var test = fs.readJSON(dir + 'package.json');
      console.log('test: ' + test); // returns the object
      console.log('test.name: ' + test.name); // returns the correct name
    })
    .withArguments(['foo'])
    .withPrompts(prompts)
    .withOptions(options)
    .on('end', done);
});
but in my sub-generator:
var memFs = require('mem-fs');
var editor = require('mem-fs-editor');

var store = memFs.create();
var fs = editor.create(store);
...
init: function() {
  this.pkg = this.fs.readJSON('package.json');
  console.log('this.pkg: ' + this.pkg); // returns undefined
}
// or
init: function() {
  this.on('ready', function() {
    this.pkg = this.fs.readJSON('package.json');
    console.log('this.pkg: ' + this.pkg); // returns undefined
  });
}
// or
anyOther: function() {
  this.pkg = this.fs.readJSON('package.json');
  console.log('this.pkg: ' + this.pkg); // returns undefined
}
The whole setup can be found here: https://travis-ci.org/markusfalk/generator-kickstart/builds/58892092
thanks for any help
Edit: I'll keep the old answer underneath; it's probably relevant to most people running into this issue, but not to you.
The idea behind mem-fs is to have an in-memory store. It doesn't write anything to disk automatically; it keeps the state in the mem-fs instance. In this case, you're creating your own mem-fs instance, while yeoman uses another instance. This means the file you write is never seen by Yeoman (and never written to disk).
For you, the fix would be to use the generator instance provided as the first parameter of the ready event.
helpers.run(path.join(__dirname, '../addcomponent'))
  .on('ready', function (generator) {
    generator.fs.write('file.txt', 'foo');
  });
Another option is to use the Node.js sync fs methods (fs.writeFileSync(), etc.).
My guess is you're using this.fs.readJSON() inside your generator constructor.
The constructor runs before the ready event is triggered. This means you read the file before it is actually written.
The usual fix is to never read inside the constructor. You can delay this step until the initializing phase, where the inDir() (or ready event) callback has already run.
As a side note, you should use inTmpDir() rather than inDir(). A combined sketch follows.
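Putting both hints together, the before() hook could look like this (a sketch; it assumes fs here is Node's built-in fs module, not the mem-fs editor, and reuses helpers, prompts and options from the question):
before(function (done) {
  helpers.run(path.join(__dirname, '../addcomponent'))
    .inTmpDir(function (dir) {
      // Write synchronously so package.json exists on disk before the generator runs.
      fs.writeFileSync(
        path.join(dir, 'package.json'),
        JSON.stringify({ name: 'foo', version: '0.0.0' })
      );
    })
    .withArguments(['foo'])
    .withPrompts(prompts)
    .withOptions(options)
    .on('end', done);
});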
