Extend node application with new actions - javascript

I've created an application which handles some basic actions like save (e.g. saving a file in the file system), edit, etc. Now I want some users to have the ability to extend this functionality with new actions. For example, a user
will clone the application, add an additional file with new actions (and some callbacks), and then register them on some event
so that I can pick these new actions up under the hood. My question is: how can I take these new actions from the new file and run them in my process? A simple example would be very helpful.
UPDATE
lets assume that this is my file that handle the action and user want to add additional action like delete
var fs = require('fs');
module.exports = {
fileAction: function (req, res, filePath) {
var urlAction = urlPath.substr(urlPath.lastIndexOf("/") + 1);
if (urlAction === 'save') {
this.save(req,res,filePath);
} else
this.delete(req,res,filePath);
},
save: function (req,res,filePath) {
var writeStream = fs.createWriteStream(filePath, {flags: 'w'});
req.pipe(writeStream);
res.writeHead(200, { 'Content-Type': 'text/plain' });
},
delete: function (req,res,filePath) {
},
}
and the delete code should be something like this
// Remove the target file synchronously.
// BUG FIX: declare the path with `var` instead of leaking an implicit global.
var filePath = 'C://test.txt';
fs.unlinkSync(filePath);
Now, as lordvlad suggests, the user should have a new file with their specific implementation, which should be used by lordvlad's suggested design flow. My question is how to add this delete functionality (which is very simple) and make it work — like a small proof of concept for this.

I could imagine some plugin system like so:
main.js
// some setup
var EE = require('events').EventEmitter;
var glob = require('glob');
var path = require('path');
var eventBus = new EE();
// find plugins
glob("plugins/*.js", function(err, files) {
    if (err) {
        // BUG FIX: bail out — `files` is undefined on error, so falling
        // through to forEach() would crash.
        console.error(err);
        return;
    }
    files.forEach(function(file) {
        // BUG FIX: glob yields paths relative to the CWD; a bare require()
        // would treat "plugins/x.js" as a package name, so resolve it first.
        var plugin = require(path.resolve(file));
        // Hand each plugin the shared event bus so it can subscribe/emit.
        plugin(eventBus);
    });
});
plugins/my-plugin.js
module.exports = function(eventBus) {
// do something interesting
// listen for events on the event bus
eventBus.on("foo", function(e){
// do something
});
eventEmitter.emit("pluginReady", "my-plugin");
};
Of course you could substitute the event emitter with some global object, or make the plugin handle callbacks by passing a callback instead of an event bus. I think the key aspect is to load plugins (done within the glob ... require block) and to make them fit into your system (which you will need to figure out yourself or provide some code samples of what you already have so somebody can give you another hint).
UPDATE after OPs update
main.js
// NOTE: `(..){..}` and `(...)` below are illustrative placeholders from the
// answer, not runnable JavaScript.
var glob = require('glob');
var xtend = require('xtend');
module.exports = {
save: function(..){..},
load: function(..){..},
}
// typeof module.exports.delete === 'undefined',
// so you cannot call module.exports delete or similar
glob("plugins/*.js", function(err, files) {
// NOTE(review): `err` is never checked here; `files` is undefined on error.
files.forEach(function(file){
var plugin = require(file);
// call plugin initializer if available
if (typeof plugin.init === "function")
plugin.init();
// Merge the plugin's exports (e.g. its `delete` action) onto this module.
xtend(module.exports, plugin);
});
// here all plugins are loaded and can be used
// i.e. you can do the following
module.exports.delete(...);
});
plugins/my-plugin.js
// Plugin module: exports the new `delete` action plus an optional `init`
// hook that the host calls once when the plugin is loaded.
// (`(..){..}` is an illustrative placeholder, not runnable JavaScript.)
module.exports = {
delete: function(..){..},
init: function() {
// do something when the plugin loads
}
}
Mind though, that any plugin could overwrite another plugin's methods.

Related

Is it possible to launch js files with different permissions using nodejs/npm?

I want to launch a js file in a js file with different permission. Just like this:
main.js (which gets started)
// BUG FIX: require() already parses .json files into an object, so the
// original JSON.parse(require(...)) call would throw; require alone suffices.
config = require("./config.json") // <- should be possible
console.log(config.authkey) // <- should be possible
require("./randomJSFile.js").run()
randomJSFile.js (which will be executed by main.js)
exports.run = () => {
let config = JSON.parse(require("./config.json") // <--- this should not be possible, only the main.js file should have access to the config.json file
console.log(config.authkey) // should not be possible
}
Does anyone know how to do something like that?
Based on a snippet from this question here you could possibly override the require function to check for the filename, something like this:
const Module = require('module');
const originalRequire = Module.prototype.require;

// Intercept every require() call and pretend config.json does not exist by
// handing back an empty object; all other requests pass through untouched.
Module.prototype.require = function (...args) {
  const [request] = args;
  if (typeof request !== 'string') return {};
  if (request.includes('config.json')) return {};
  return originalRequire.apply(this, args);
};
And then perform this override after you've already required the config in your main file, so you don't accidentally block yourself

Model return values from another file not working with Sequelize.js

I have the following code that does not work currently.
var config = require('./libs/sequelize-lib.js');
var connection = config.getSequelizeConnection();//Choosing to not pass in variable this time since this should only run via script.
var models = config.setModels(connection);//Creates live references to the models.
//Alter table as needed but do NOT force the change. If an error occurs we will fix manually.
connection.sync({ alter: true, force: false }).then(function() {
    // Return the create() promise so the chain waits for the insert.
    return models.users.create({
        name: 'joe',
        loggedIn: true
    });
}).then(function(task) {
    console.log("saved user!!!!!");
    // BUG FIX: exit only after create() has resolved; the original called
    // process.exit() right after *starting* the async insert, killing the
    // process before the row could reach the database.
    process.exit();//close the nodeJS Script
}).catch(function(error) {
    console.log(error);
    process.exit(1);
});
sequelize-lib.js
var Sequelize = require('sequelize');
exports.getSequelizeConnection = function(stage){
var argv = require('minimist')(process.argv.slice(2)); //If this file is being used in a script, this will attempt to get information from the argument stage passed if it exists
//Change connection settings based on stage variable. Assume localhost by default.
var dbname = argv['stage'] ? argv['stage']+"_db" : 'localdb';
var dbuser = argv['stage'] ? process.env.RDS_USERNAME : 'admin';
var dbpass = argv['stage'] ? process.env.RDS_PASSWORD : 'local123';
var dbhost = argv['stage'] ? "database-"+argv['stage']+".whatever.com" : 'localhost';
//If state variable used during require overide any arguments passed.
if(stage){
dbname = stage+"_db";
dbuser = process.env.RDS_USERNAME
dbpass = process.env.RDS_PASSWORD
dbhost = "database-"+stage+".whatever.com"
}
var connection = new Sequelize(dbname,dbuser,dbpass, {
dialect: 'mysql',
operatorsAliases: false, //This gets rid of a sequelize deprecated warning , refer https://github.com/sequelize/sequelize/issues/8417
host: dbhost
});
return connection;
}
exports.setModels = function(connection){
//Import all the known models for the project.
const fs = require('fs');
const dir = __dirname+'/../models';
var models = {}; //empty model object for adding model instances in file loop below.
//#JA - Wait until this function finishes ~ hence readdirSync vs regular readdir which is async
fs.readdirSync(dir).forEach(file => {
console.log(file);
//Split the .js part of the filename
var arr = file.split(".");
var name = arr[0].toLowerCase();
//Create a modle object using the filename as the reference without the .js pointing to a created sequelize instance of the file.
models[name] = connection.import(__dirname + "/../models/"+file);
})
//Showcase the final model.
console.log(models);
return models; //This returns a model with reference to the sequelize models
}
I can't get the create command to work however with this setup. My guess is the variables must not be passing through correctly somehow. I'm not sure what I'm doing wrong?
The create command definitely works because if in the sequelize-lib.js I modify the setModels function to this...
// Diagnostic variant of setModels: same loading loop, but it also fires a
// create() per model to prove the imported models are live.
exports.setModels = function(connection){
//Import all the known models for the project.
const fs = require('fs');
const dir = __dirname+'/../models';
var models = {}; //empty model object for adding model instances in file loop below.
//#JA - Wait until this function finishes ~ hence readdirSync vs regular readdir which is async
fs.readdirSync(dir).forEach(file => {
console.log(file);
//Split the .js part of the filename
var arr = file.split(".");
var name = arr[0].toLowerCase();
//Create a model object using the filename as the reference without the .js pointing to a created sequelize instance of the file.
models[name] = connection.import(__dirname + "/../models/"+file);
// NOTE(review): this create() is fire-and-forget — no .then()/.catch(), so
// errors are lost and the insert races with whatever runs afterwards.
models[name].create({
"name":"joe",
"loggedIn":true
});
})
//Showcase the final model.
console.log(models);
return models; //This returns a model with reference to the sequelize models
}
Then it works and I see the item added to the database! (refer to proof image below)
Take note, I am simply running create on the variable at this point. What am I doing wrong where the model object is not passing between files correctly? Weird part is I don't get any errors thrown in the main file?? It's as if everything is defined but empty or something and the command is never run and nothing added to the database.
I tried this in the main file also and no luck.
// Attempt made directly in the main file — appeared to do nothing.
// NOTE(review): likely raced with process.exit(), as the OP confirms below.
models["users"].create({
name: 'joe',
loggedIn: true
}).then( task => {
console.log("saved user!!!!!");
});
The purpose of this all is to read models automatically from the model directory and create instances that are ready to go for every model, even if new one's are added in the future.
UPDATE::
So I did another test that was interesting, it seems that the create function won't work in the .then() function of the sync command. It looks like it was passing it correctly though. After changing the front page to this...
var config = require('./libs/sequelize-lib.js');
var connection = config.getSequelizeConnection();//Choosing to not pass in variable this time since this should only run via script.
var models = config.setModels(connection);//Creates live references to the models using connection previously created.
// NOTE(review): create() is started *before* sync() here; it appears to work,
// but it is still fire-and-forget and may race with process.exit() below.
models["users"].create({
"name":"joe",
"loggedIn":true
});
//Alter table as needed but do NOT force the change. If an error occurs we will fix manually.
connection.sync({ alter: true, force: false }).then(function() {
process.exit();//close the nodeJS Script
}).catch(function(error) {
console.log(error);
});
Doing this seems to get create to work. I'm not sure if this is good form or not though since the database might not be created at this point? I need a way to get it to work in the sync function.
Well I answered my question finally, but I'm not sure I like the answer.
// Final, working version: process.exit() is only called once create() has
// resolved, so the insert is guaranteed to finish before the script dies.
var config = require('./libs/sequelize-lib.js');
var connection = config.getSequelizeConnection();//Choosing to not pass in variable this time since this should only run via script.
var models = config.setModels(connection);//Creates live references to the models using connection previosly created.
//Alter table as needed but do NOT force the change. If an error occurs we will fix manually.
connection.sync({ alter: false, force: false }).then( () => {
models["users"].create({
"name":"joe",
"loggedIn":true
}).then( user => {
console.log("finished, with user.name="+user.name);
// Exit inside the create() .then(), after the row is persisted.
process.exit();
}).catch( error => {
console.log("Error Occured");
console.log(error);
});
}).catch(function(error) {
console.log(error);
});
turns out that process.exit was triggering before create would occur because create happens async. This means that all my code will have to constantly be running through callbacks...which seems like a nightmare a bit. I wonder if there is a better way?

Mongoose possible issue

I have an issue using mongoose.
The application I am writing consists in a file watcher that notifies clients about certain events via email and socketio messages.
I made an example that shows the problem:
basically, there is a class called mainFileWatcher that contains a submodule which in turn watches for new files or folders created in the script directory, emitting an "event" event when that happens. The mainFileWatcher listens for that event and calls a static method of a mongoose Client model.
If you run the script setting REPL=true you'll be able to access a watcher.submodule object and manually emit an "event" event.
Now, if you manually trigger the event, you'll see a statement that
says that the "event" event was triggered and an email address as a response.
Otherwise, if you create a file or a folder in the script folder, you'll
just see that statement. Actually, if you run the script with REPL=true
you'll get the email only after pressing any key, and nothing otherwise.
The fact that you don't get the email address as a response means to me that
the code in the promise in mongoose model doesn't get called for some reason.
Here is the code, sorry I couldn't make it shorter
// run
// npm install mongoose bluebird inotify underscore
//
// If you run the script with REPL=true you get an interactive version
// that has as context the filewatcher that emit events in my original code.
// It can be acessed through the watcher.submodule object.
// The watcher triggers a "event" event when you create a file or a folder in the script
// directory.
//
// If you emit manually an "event" event with the watcher.submodule in the repl, you should see a
// statement that "event" was triggered and
// an email address (that belongs to a fake client created by the bootstrap
// function at startup).
// If instead you create a file or a folder in the script folder (or whatever folder you have setted),
// you should see this time you'll have no email response. Actually, if you run with REPL=true,
// you'll have no email response untill you press any key. If you run without REPL, you'll
// have no email response.
'use strict';
var mongoose = require('mongoose');
//mongoose.set('debug', true);
mongoose.Promise = require('bluebird');
var Schema = mongoose.Schema;
var EventEmitter = require('events');
var util = require('util');
var _ = require("underscore");
var Inotify = require('inotify').Inotify;
var inotify = new Inotify();
// Schema declaration
// strict:false lets documents carry fields not declared here (c_id,
// subscriptions, mailNotifications), which bootstrap() and the static
// finder below rely on.
var clientSchema = new Schema({
emails: {
type: [String]
}
}, {strict:false});
// Collect the email addresses of every client subscribed to `subscription`
// with mail notifications enabled. Node-style: cb(err, emails); emails is
// null when no client matches.
clientSchema.statics.findMailSubscriptions = function (subscription, cb) {
    this.find({
        subscriptions: {$in: [subscription]},
        mailNotifications: true
    }).exec().then(function (clients) {
        if(!clients || clients.length === 0) return cb(null, null);
        var emails = [];
        clients.forEach(function (client) {
            // Flatten each client's emails array into the result list.
            Array.prototype.push.apply(emails, client.emails)
        });
        return cb(null, emails);
    }).catch(function(err){
        console.error(err);
        // BUG FIX: the original swallowed errors here, so the caller's
        // callback was never invoked on a query failure.
        return cb(err);
    })
};
// Compile the model used by the watcher and the bootstrap below.
var clientModel = mongoose.model('Client', clientSchema);
// Mongoose connection
mongoose.connect('mongodb://localhost:27017/test', function (err, db) {
if (err) console.error('Mongoose connect error: ', err);
});
mongoose.connection.on('connected', function () {
console.log('Mongoose connected');
});
// bootstrap function: it inserts a fake client in the database (only if the
// test client is not already present).
function bootstrap() {
    clientModel.findOne({c_id: "testClient"}).then(function (c) {
        if(!c) {
            var new_c = new clientModel({
                "name": "Notifier",
                "c_id": "testClient",
                "subscriptions": ["orders"],
                "registeredTo": "Daniele",
                "emails": ["email#example.com"],
                "mailNotifications": true
            });
            new_c.save(function(err){
                if (err) console.error('Bootstrap Client Error while saving: ', err.message );
            });
        }
    }).catch(function(err){
        // BUG FIX: the original findOne() chain had no rejection handler,
        // so lookup failures disappeared silently.
        console.error('Bootstrap Client Error while looking up: ', err.message);
    });
}
// submodule of the main file watcher: it looks for new files created in the script dir
// EventEmitter subclass: registers an inotify watch on '.' and re-emits every
// IN_CREATE notification as an "event" event.
var submoduleOfFileWatcher = function() {
var _this = this;
var handler = function (event) {
var mask = event.mask;
// NOTE(review): `type` is computed but never used.
var type = mask & Inotify.IN_ISDIR ? 'directory' : 'file';
if (mask & Inotify.IN_CREATE) {
_this.emit("event", event);
}
}
var watcher = {
path: '.', // here you can change the path to watch if you want
watch_for: Inotify.IN_CREATE,
callback: handler
}
EventEmitter.call(this);
// Keep the watch descriptor so the watch could be removed later.
this.in_id = inotify.addWatch(watcher);
}
util.inherits(submoduleOfFileWatcher, EventEmitter);
// Main File Watcher (it contains all the submodules and listens to the events emitted by them)
var mainFileWatcher = function () {
// Wire everything up: seed the DB, bind `this`, and subscribe to the submodule.
this.start = function() {
bootstrap();
_.bindAll(this, "onEvent");
this.submodule = new submoduleOfFileWatcher();
this.submodule.on("event", this.onEvent)
};
// Fires on every "event"; looks up subscribed emails via the model static.
this.onEvent = function() {
console.log("event triggered");
clientModel.findMailSubscriptions("orders", function(err, mails) {
if (err) console.error(err);
console.log(mails); // THIS IS THE CODE THAT OUTPUTS ONLY IF YOU TRIGGER THE "event" EVENT manually
})
}
}
// Instantiate and start the watcher
var watcher = new mainFileWatcher()
watcher.start();
// start the repl if asked
// When REPL=true, expose the watcher so "event" can be emitted by hand.
if (process.env.REPL === "true") {
var repl = require('repl');
var replServer = repl.start({
prompt: 'filewatcher via stdin> ',
input: process.stdin,
output: process.stdout
});
replServer.context.watcher = watcher;
}
Just copy and paste the code, install deps and run it.
Things I tried:
I changed the mongoose Promise object to use bluebird promises, hoping that
I could intercept some exception.
I browsed Mongoose calls with node-inspector and indeed the find method gets called and it seems that it throws no exceptions. I really can't figure out what's happening because I don't get any errors at all.
It is not the database connection (I tried to open one just before the findMailSubscriptions call and got an exception for trying to open an already opened connection).
I figure it might be some issues with scopes or promises.
Is there something I am missing about mongoose, or is it just my code that causes this behaviour?

How to create a Node.js module from Asynchronous Function response?

This problem is in regards the creation of a Node module that depends on a async function to return the content. For instance, "src/index.js" is the following:
GOAL
The module A, implemented from "src/index" must be resolved and must not depend on promises, or anything else... It will just return a JSON object of computed values.
var a = require("./src/index");
// should be resolved already.
console.log(a.APP_NAME)
src/index.js
"use strict";
var CoreClass = require("./core-class");
var coreInstance = new CoreClass();
// NOTE(review): assigning module.exports inside an async callback does not
// work as hoped — require() hands back the (still empty) exports object
// synchronously, before this callback ever runs. This is the bug the
// question is about.
coreInstance.toJson(function(err, coreData) {
if (err) {
console.log("Error while loading " + __filename);
console.log(err);
return;
}
console.log(coreData);
// Export the data from the core.
module.exports = coreData;
});
src/core-class.js
The implementation of the method "toJson()", defined in the class in the file "src/core-class.js" is as follows:
/**
 * Collect the app's metadata (package.json values, directory paths, and —
 * when available — pom.xml coordinates) into a plain object.
 * @param {function(Error, Object=)} callback node-style callback that
 *   receives the computed properties object.
 */
ISPCore.prototype.toJson = function toJson(callback) {
    var result = {
        // From package.json
        APP_NAME: this.appPackageJson.name.trim(),
        APP_VERSION: this.appPackageJson.version.trim(),
        APP_CONFIG_DIR: this.APP_DIR + "/config",
        APP_DOCS_DIR: this.APP_DIR + "/docs",
        APP_TESTS_DIR: this.APP_DIR + "/tests",
    };
    // TODO: Remove this when we have a registry
    if (!this.pom) {
        // Let's verify if there's a pom.xml file in the root APP_DIR
        var _this = this;
        this.APP_POM_PATH = this.APP_DIR + "/pom.xml";
        // Check first to see if the file exists
        fs.stat(this.APP_POM_PATH, function(err, fileStats) {
            // The file does not exist, so we can continue with the current result.
            if (err) {
                return callback(null, result);
            }
            _this._loadPomXmlSettings(function pomXmlCallback(err, pomObject) {
                if (err) {
                    return callback(err);
                }
                _this.pom = pomObject;
                // Update the result with the pom information
                result.POM_GROUPID = _this.pom.groupid || "undefined";
                result.POM_ARTIFACTID = _this.pom.artifactid || "undefined";
                result.POM_VERSION = _this.pom.version || "undefined";
                // Callback with the updated version.
                return callback(null, result);
            });
        });
    } else {
        // BUG FIX: this branch read `this.pom.artifactId` (camelCase) while
        // the async branch above reads lowercase `artifactid` — the branch
        // that demonstrably populates POM_ARTIFACTID in the sample output —
        // so the camelCase lookup always yielded "undefined".
        result.POM_GROUPID = this.pom.groupid || "undefined";
        result.POM_ARTIFACTID = this.pom.artifactid || "undefined";
        result.POM_VERSION = this.pom.version || "undefined";
        // Return just what's been collected so far, including the pom.
        return callback(null, result);
    }
};
Test class
Requiring this and trying to use the library just returns an empty object. Here's the test class...
// describing the method to get the instance.
// NOTE(review): the failing assertions are expected — require("../src/")
// returns the still-empty exports object because src/index.js assigns
// module.exports inside an async callback.
describe("require(sp-core) with pom.xml", function() {
var core = null;
before(function(done) {
// Copy the fixture pom.xml to the APP_DIR
fs.writeFileSync(__dirname + "/../pom.xml", fs.readFileSync(__dirname + "/fixture/pom.xml"));
// Load the library after the creation of the pom
core = require("../src/");
console.log("TEST AFTER CORE");
console.log(core);
done();
});
after(function(done) {
// Delete the pom.xml from the path
fs.unlinkSync(__dirname + "/../pom.xml");
done();
});
it("should load the properties with pom properties", function(done) {
expect(core).to.be.an("object");
console.log("Loaded pom.xml metadata");
console.log(core);
expect(core.POM_ARTIFACTID).to.exist;
expect(core.POM_VERSION).to.exist;
done();
});
});
Execution of the tests
However, after a while, the output from the library shows up in the console.
SPCore with pom.xml
require(sp-core) with pom.xml
TEST AFTER CORE
{}
Loaded pom.xml metadata
{}
1) should load the properties with pom properties
{ APP_NAME: 'sp-core',
APP_VERSION: '0.3.5',
ENV: 'development',
NODE_ENV: 'development',
IS_PROD: false,
APP_DIR: '/home/mdesales/dev/isp/sp-core',
APP_CONFIG_DIR: '/home/mdesales/dev/isp/sp-core/config',
APP_DOCS_DIR: '/home/mdesales/dev/isp/sp-core/docs',
APP_TESTS_DIR: '/home/mdesales/dev/isp/sp-core/tests',
POM_GROUPID: 'com.mycompany',
POM_ARTIFACTID: 'my-service',
POM_VERSION: '1.0.15-SNAPSHOT' }
0 passing (142ms)
1 failing
1) SPCore with pom.xml require(sp-core) with pom.xml should load the properties with pom properties:
AssertionError: expected undefined to exist
How to properly create a module that depends on an Async call?
I'm sure this is due to the asynchronous call, but I was thinking that the module would not return {}, but wait until the callback returns.
I tried using:
Async.waterfall
Deasync (does not work)
Async.waterfall attempt
"use strict";
var async = require("async");
var CoreClass = require("./core-class");
var coreInstance = new CoreClass();
// NOTE(review): this attempt cannot work — require() has already returned
// the empty exports object by the time the waterfall runs. Also, the final
// task ignores the waterfall callback it is given and never calls it, so
// the series never completes.
async.waterfall([
function(cb) {
coreInstance.toJson(cb);
},
function(coreData) {
console.log(coreData);
module.exports = coreData;
}
]);
Please please help!
Following the comments, I revisited the attempt of using "deasync" module, and it WORKS! YES WE CAN! Cheating with the hack of "deasync" :D
Runnable instance
The runnable solution is at http://code.runnable.com/VbCksvKBUC4xu3rd/demo-that-an-async-method-can-be-returned-before-a-module-exports-is-resolved-for-node-js-deasync-pom-parser-and-stackoverflow-31577688
Type "npm test" in the terminal box and hit "ENTER" (always works).
Just click in the "Run" button to see the execution of the code. All the source code is available. (Sometimes the container gets corrupted and the test fails).
Solution
Here's the implementation of the "GOAL" module.
/** #module Default Settings */
"use strict";
var CoreClass = require("./core-class");
var merge = require("merge");
var deasync = require("deasync");
// Core properties needed.
var coreInstance = new CoreClass();
// NOTE(review): toJson() is called here without a callback — presumably a
// synchronous variant of the method shown earlier; confirm against core-class.
var coreProperties = coreInstance.toJson();
// Pom properties temporary support, deasync the async call
// deasync blocks the event loop until the wrapped callback fires, making the
// async pom load look synchronous so module.exports can be set in one pass.
var loadPom = deasync(coreInstance.loadPomXmlSettings);
var pomObject = loadPom(coreProperties.APP_POM_PATH);
// Merge them all.
var allProperties = merge(coreProperties, pomObject);
module.exports = allProperties;
With that, all the code is returned as expected for the module.exports!

BreezeJs with dedicated web worker

I am trying to initialize a Breeze manager inside a 'Web Worker'.
RequireJs, knockout, q, breeze are being imported inside the worker.
After a call to:EntityQuery.from('name').using(manager).execute(),
the following error appears:
Uncaught Error: Q is undefined. Are you missing Q.js? See https://github.com/kriskowal/q.
A live preview is uploaded here http://plnkr.co/edit/meXjKa?p=preview
(plunk supports downloading for easier debug).
EDIT -- relevant code
Worker.js
// Web Worker entry: pull in the libraries, then shim them into requirejs.
importScripts('knockout.js', 'q.js', 'breeze.js', 'require.js');
define('jquery', function () { return jQuery; });
define('knockout', ko);
define('q', Q); //Just trying to assign q since breeze requests Q as q
require(function () {
// NOTE(review): `self` is assigned but never used below.
var self = this;
this.q = this.Q; //Just trying to assign q since breeze requests Q as q
breeze.NamingConvention.camelCase.setAsDefault();
var manager = new breeze.EntityManager("breeze/Breeze");
var EntityQuery = breeze.EntityQuery;
// Q or q here is defined (TESTED)
var test = function (name) {
return EntityQuery.from(name)
.using(manager).execute() // <-- Here q/Q breaks (I think on execute)
};
// NOTE(review): .then(test('Lala')) invokes test() immediately instead of
// passing a function; to chain, it would need .then(function(){ return test('Lala'); }).
var primeData = function () {
return test('Languages')
.then(test('Lala'))
.then(test('Lala2'))
};
primeData();
// Notify the main thread after a fixed delay.
setTimeout(function () { postMessage("TestMan"); }, 500);
});
Worker will be initialized on main page as:
var myWorker = new Worker("worker.js");
Ok here it goes:
Create a new requireJs and edit the
isBrowser = !!(typeof window !== 'undefined' && typeof navigator !== 'undefined' && window.document)
to
isBrowser = false
Create a new jQuery build so it uses nothing related to window, and generally nothing that a WebWorker cannot access. Unfortunately I can't remember where I got this custom jQuery file, but I have uploaded it here: "https://dl.dropboxusercontent.com/u/48132252/jqueydemo.js".
Please if you find the author or the original change link and give credit.
My workerJs file looks like:
// Working worker: fetch the data, build the entities off the UI thread, then
// ship them to the main thread as an exported-entity string.
importScripts('Scripts/test.js', 'Scripts/jqueydemo.js', 'Scripts/q.js', 'Scripts/breeze.debug.js', 'Scripts/require2.js');
define('jquery', function () { return jQuery; });
require(
{
baseUrl: "..",
},
function () {
var manager = new breeze.EntityManager("breeze/Breeze");
var EntityQuery = breeze.EntityQuery;
// NOTE(review): `name` is not defined in this scope — presumably the query
// name should be a literal here; verify before reuse.
var primeData = function () {
return EntityQuery.from(name)
.using(manager).execute() // Get my Data
.then(function (data) {
console.log("fetced!\n" + ((new Date()).getTime()));
var exportData = manager.exportEntities(); // Export my constructed entities
console.log("created!\n" + ((new Date()).getTime()));
var lala = JSON.stringify(exportData)
postMessage(lala); // Send them as a string to the main thread
})
};
primeData();
});
Finally on my mainJs i have something like:
// Main-thread side: spawn the worker, then import the entity payload it
// posts back into the UI thread's breeze manager.
this.testWorker = function () {
var myWorker = new Worker("worker.js"); // Init Worker
myWorker.onmessage = function (oEvent) { // On worker job finished
toastr.success('Worker finished and returned');
var lala = JSON.parse(oEvent.data); // Reverse string to JSON
manager.importEntities(lala); // Import the pre-Constructed Entities to breezeManager
toastr.success('Import done');
// Worker is single-purpose: tear it down once the import completes.
myWorker.terminate();
};
};
So we have managed to use breeze on a WebWorker enviroment to fetch and create all of our entities, pass our exported entities to our main breeze manager on the main thread(import).
I have tested this with 9 tables fully related to each other and about 4MB of raw data.
PROFIT: UI stays fully responsive all the time.
No more long execution script, application not responding or out of memory errors) at least for chrome
*As one would expect, breeze's importEntities is much faster than creating a full 4MB of raw data plus the association process that follows for those entities.
By having all the heavy work done on the back, and only use import entities on the front, breeze allows you to handle large datasets 'like a breeze'.

Categories

Resources