I'm very, very new to the whole NodeJS stack, and I'm trying to rough out a simple login system for practice.
Jumping to my question,
app.js
...
var mongoose = require( 'mongoose' );
var templates = require( './data/inc.js' ); // includes schema structures
...
user.js - included in inc.js
...
module.exports =
{
"Schema" : new exports.mongoose.Schema({
"uid": mongoose.Schema.Types.ObjectId,
"username": { type:String, unique:true },
"alias": String,
"credentials":
{
"salt": String,
"password": String,
"key": String
},
"profile":
{
"age": { type: Number, min: 18 }
},
"last_login": Date,
"updated": { type: Date, default: Date.now }
})
}
...
The 'user.js' script above will not work because it doesn't have access to the mongoose object instantiated in the 'app.js' script. In PHP, any included/required script can access variables from the parent script, but in NodeJS, as far as I can tell, I have to re-require the mongoose module in order to create my schema tree.
user.js
...
var mongoose = require( 'mongoose' ); // must re-require in this script to use the mongoose object
module.exports =
{
...
}
...
Is there any work-around that will allow me the same scope access as PHP?
The answer is that there are workarounds, but you really don't want to use them, ever, except for things you deliberately want to hack into the global scope of every running module in your application, up to and including all dependencies (mongoose) and all of ITS dependencies.
override.js
global.thisIsNowAvailable = true;
flaky-file.js
if (thisIsNowAvailable) { /* ... */ }
index.js
require("./override");
require("./flaky-file");
The same will work for overriding methods on global prototypes, et cetera.
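For instance, here is a hedged sketch of the same trick applied to a built-in prototype (the last helper is purely illustrative, not something from the question):
override-prototype.js
// patches Array.prototype for every module in the process
Array.prototype.last = function () {
  return this[this.length - 1];
};
Every module that loads after this file is required will see the patched prototype, which is exactly why it is so hard to reason about.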
Unless your library is super-awesome and is intended to intercept, parse and interpret code at require-time, like
require("babel/register"); // all loaded modules can now be written in ES6
doing this for any other reason leads to horrible code-bases...
broken-index.js
require("flaky-file");
require("override");
// you might have just attempted to reference a variable that doesn't exist,
// thrown an error and crashed your entire server
// (not just a single connection, like PHP... ...the entire server went down,
// for everyone, and it has to be restarted).
Think of modules as separate function scopes.
It's really simple to do something like:
needs-mongoose.js
function doSomeInitWithMongoose (db) { /* ... */ }
function doSomeRuntimeWithMongoose (db, params) { /* ... */ }
module.exports = mongoose => {
doSomeInitWithMongoose(mongoose);
return {
run: params => {
/* ... app is run here ... */
doSomeRuntimeWithMongoose(mongoose, params);
}
};
};
configures-mongoose.js
var mongoose = require("mongoose");
function configure (db, cfg) { /* ... */ return db; }
module.exports = config => {
var configuredDB = configure(mongoose, config);
return configuredDB;
};
main.js
// to support arrow functions and other awesome ES6, including ES6 modules
require("babel/register");
var config = require("./mongoose-config");
var db = require("./configures-mongoose")(config);
var app = require("./needs-mongoose")(db);
app.run({ /* ... */ });
EDIT
Updated the last few files to be a structurally-correct pseudo-program (which does absolutely nothing, of course);
Of course, if index.js or server.js were to require("babel/register"); and then load main.js (without the Babel include in it), all of the require statements south of Babel could be written as ES6 modules, without issue.
server.js
require("babel/register");
require("./es6-main");
es6-main.js
import config from "./mongoose-config";
import configureDB from "./configures-mongoose";
import loadApp from "./needs-mongoose";
const db = configureDB(config);
const app = loadApp(db);
app.run({ /* ... */ });
Note that now I'm naming the functions I was originally returning, because in JS when you return a function, you can immediately call it...
getFunc( config )( data );
...but you can't act immediately on import statements.
The rule of thumb is that if you're going to export an object to the outside world, it should either have zero external dependencies, or have all of its external dependencies set up later, by setters of some kind:
var utils = require("./utils"); // doesn't need any information
utils.helperFunc(data);
or
var catsAndPorn = true;
var internets = [];
var SeriesOfTubes = require("series-of-tubes");
var internet = new SeriesOfTubes( catsAndPorn );
internets.push( internet );
or
var http = require("http");
var bigOlFramework = require("big-ol-framework");
bigOlFramework.setDBPool( myDBCluster );
http.createServer( bigOlFramework.connectionHandler ).listen( 8080 );
None of them require outside information for their actual init (though they may require their own internal dependencies).
If you want to return something which does rely on external init, either export a factory/constructor, or export a function, which accepts your config/data, and then returns what you want, after an init sequence.
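A minimal sketch of that last option (the names createClient, connect and the config shape are illustrative assumptions, not anything from the post):
client-factory.js
// exports a factory; the init sequence only runs once config is supplied
module.exports = function createClient (config) {
  var state = { host: config.host, connected: false };
  return {
    connect: function () {
      state.connected = true; // stand-in for a real connection step
      return state;
    }
  };
};
consumer.js
var client = require("./client-factory")({ host: "localhost" });
client.connect();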
EDIT 2
The last piece of advice here applies to mongoose usage, and to a similar extent to Gulp, and to several routers...
...when you want a single file that registers its contents with a registry, or that requires a core component in order to return something, the pattern in Node which makes the most sense is to export a function which then does the init:
var Router = require("router");
var router = new Router( );
require("./routes/login")(router);
require("./routes/usesrs")(router);
require("./routes/articles")(router);
Where "./routes/articles.js" might look like
import ArticlesController from "./../controller/articles"; // or wherever
var articles = new ArticlesController();
module.exports = router => {
router.get("/articles", ( ) => articles.getAll( ));
router.post("/articles", ( ) => articles.create( ));
};
So if you were looking to structure ORM based on schema, you might do similar:
var mongoose = require("mongoose");
var Users = require("./schema/users")(mongoose);
where "./schema/users" looks like:
module.exports = mongoose => {
return new mongoose.Schema({ /* ... */ });
};
Hope that helps.
Why don't you just do this?
var mongoose = require( 'mongoose' );
...
"Schema" : new mongoose.Schema({
Instead of:
exports.mongoose.Schema // I'm not sure where you got `exports.mongoose` from.
Also, you don't have to include the .js extension when requiring:
var templates = require( './data/inc' );
Edit
I believe you can't do it the PHP way. Also, requires are cached, so there's no need to worry about re-requiring.
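To illustrate that caching (a small sketch with made-up file names):
counter.js
module.exports = { hits: 0 };
elsewhere.js
// requiring the same path twice returns the exact same cached object
var a = require('./counter');
var b = require('./counter');
a.hits++;
console.log(b.hits); // 1 (both variables point at the single cached instance)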
Let's say we are creating a module called app by constructing a new vm.SourceTextModule object:
const context = {
exports: {},
console, // custom console object
};
const sandbox = vm.createContext(context);
const app = new vm.SourceTextModule(
`import path from 'path';
console.log(path.resolve('./src'));`,
{
context: sandbox,
}
);
According to the Node.js documentation, to obtain the default export from the path module we should "link" the imported dependencies of the app module to it.
To achieve this we should pass a linker callback to the app.link method:
async function linker(specifier, referencingModule) {
// the desired logic...
}
await app.link(linker);
How do we implement the linker function properly so that we can import the path module in the newly created app module and use it:
await app.evaluate(); // => /home/user/Documents/project/src
P.S. We are using TypeScript, so I checked that we have the types for the path package installed.
package.json:
"#types/node": "^17.0.31",
I found https://github.com/nodejs/node/issues/35848 where someone posted a code snippet.
From there I've adapted the following linker callback:
const imports = new Map();
async function linker(specifier, referencingModule) {
if (imports.has(specifier))
return imports.get(specifier);
const mod = await import(specifier);
const exportNames = Object.keys(mod);
const imported = new vm.SyntheticModule(
exportNames,
() => {
// somehow called with this === undefined?
exportNames.forEach(key => imported.setExport(key, mod[key]));
},
{ identifier: specifier, context: referencingModule.context }
);
imports.set(specifier, imported);
return imported;
}
The code snippet from the GitHub issue didn't work for me on Node 18.7.0 as is, because the evaluator callback passed to the constructor of SyntheticModule is somehow called with this set to undefined. This may be a Node bug.
I also cached the imported SyntheticModules in a Map because if they have internal state, creating a new SyntheticModule every time will reset that state.
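For reference, this is roughly how the linker plugs into the module from the question; note that vm.SourceTextModule is only available when Node is started with --experimental-vm-modules (the async wrapper below is just a sketch):
run-app.js
const vm = require('vm');

(async () => {
  const context = vm.createContext({ console });
  const app = new vm.SourceTextModule(
    `import path from 'path';
     console.log(path.resolve('./src'));`,
    { context }
  );
  // resolve every import specifier through the linker defined above, then run the module body
  await app.link(linker);
  await app.evaluate(); // => /home/user/Documents/project/src
})();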
I am writing a custom webpack loader to remove unnecessary code that Terser can't pick up.
Here's the sample source output from the webpack loader:
const SvgsMap = {
map1: () => {
return '1';
},
map2: () => {
return '2';
},
map3: () => {
return '3';
},
// ...redacted
map100: () => {
return '100';
},
}
Note that the above comes into the loader as a string. I also have a whitelist (a string[]) of the entries that should be included in the build output:
const whitelistsArr = ["map1"]
I am currently writing a webpack loader to pre-process this before it gets bundled. It currently uses the Node VM module, which I assumed could parse the source into a JavaScript object, so that I can remove the unused properties of SvgsMap and then output it back again as a string.
My questions are:
Am I doing it the right way with Loader to remove them? Or is it actually a webpack plugin job to do this? Any other alternatives?
I am hitting a wall doing this with the VM module; it seems like it's unable to mutate the existing code and output it back as a string. Am I wrong here?
Any suggestion is appreciated.
Here's my loader's code so far:
const path = require( 'path' );
const { loader } = require( 'webpack' );
const vm = require( 'vm' );
const whitelists = ['frame21Web'];
const loaderFn = function ( source ) {
/** @type {loader.LoaderContext} */
// eslint-disable-next-line babel/no-invalid-this
const self = this;
const filename = path.basename( self.resourcePath );
const templateWithoutLoaders = filename.replace( /^.+!/, '' ).replace( /\?.+$/, '' );
const vmContext = vm.createContext( { module: {} } );
let newSource = '';
try {
const vmScript = new vm.Script( source, { filename: templateWithoutLoaders } );
const cachedData = vmScript.createCachedData();
console.log(cachedData.toString()); // Doesn't seem to output as a string.
}
catch (err) {
console.error(err);
}
console.log( 'loader', filename, source );
process.exit( 0 );
return source;
};
module.exports = loaderFn;
There may be a couple of answers to this question; it's difficult to know without understanding the reasoning behind the removal.
If you have control of the file, you could use a combination of Webpack's Define plugin, and some if/else logic. For example
// in your WP config file
new webpack.DefinePlugin({
'process.env.IS_CLIENT': JSON.stringify(true), // will only be true when compiled via WP
});
// in your module
if (process.env.IS_CLIENT) {
SvgsMap.map1 = () => '1';
}
The above pattern allows for adding/removing chunks of code for your Client bundle, while also allowing for use on the Server.
The other option would be to write a custom Babel plugin (not a WP plugin). I found this article helpful in the past, when I had to write some plugins. Babel gives you more control over how the parts of a JS file are processed, and you can use that plugin outside of WP (like while running unit tests).
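For what it's worth, here is a rough sketch of what such a Babel plugin could look like for the SvgsMap case (the plugin name and the assumption that the object is always declared as const SvgsMap = { ... } are mine, not from the article):
babel-plugin-prune-svgs-map.js
// removes every property of SvgsMap whose key is not in the whitelist
const whitelist = new Set(['map1']);

module.exports = function pruneSvgsMap() {
  return {
    visitor: {
      VariableDeclarator(path) {
        if (path.node.id.name !== 'SvgsMap') return; // only touch the SvgsMap declaration
        const init = path.get('init');
        if (!init.isObjectExpression()) return;
        init.get('properties').forEach(prop => {
          const key = prop.node.key;
          const name = key && (key.name || key.value);
          if (!whitelist.has(name)) prop.remove();
        });
      },
    },
  };
};
You would then register it in your Babel config's plugins array, and babel-loader would apply it during the webpack build (or you could run it standalone via @babel/core).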
Normally I'm using ES6 syntax to import things from another file:
target.js
import { articles } from '/imports/fixtures.js'
console.log(articles.main)
fixtures.js
export const
articles = {
main: { _id: '1234', title: 'anything' }
}
Now I need to use the fixtures.js file in some testing modules, which needs the require syntax.
But this would not work:
var { articles } = require('/imports/fixtures.js')
What is the correct syntax for this?
Destructuring assignment is a recent feature; if your version of JavaScript (I guess of Node.js) is prior to ES2015, you could use:
var articles = require('/imports/fixtures.js').articles
N.B.: Node.js support for destructuring assignment starts with v6.
If you are exporting articles as a single object, you need to receive it as a single object.
For This
export const
articles = {
main: { _id: '1234', title: 'anything' }
}
Use This
var articles = require('/imports/fixtures.js');
For main
This will reach into the articles object, pull out main, and store it in a variable named main in target.js:
var { articles: { main } } = require('/imports/fixtures.js');
I'm trying to split my node module functions into additional files as there are numerous functions I'm looking to add.
I would like to use the main file's functions inside files loaded from lib, and be able to call the lib functions directly; see:
In my main file index.js:
function Api(opt) {
// set options
}
Api.prototype.get = function (endpoint) {
return this.request('GET', endpoint, null);
};
Api.prototype.Catalog = require('./lib/catalog.js');
module.exports = Api;
Then in lib/catalog.js
function Catalog () {};
Catalog.prototype.getCategories = function () {
return Api.get('categories');
}
module.exports = Catalog;
Then I'm hoping to achieve the following when the module is required, so that the Catalog file's functions are accessible like this:
var Module = require('module');
api = new Module({
url: 'http://example.com', // without trailing slash
username: 'username',
password: 'password'
});
api.Catalog.getCategories();
When doing it this way I am getting the following error:
TypeError: Cannot read property 'getCategories' of undefined
Is there a recommended way to achieve this or perhaps splitting it down into multiple node modules?
Try adding requires for the new modules:
var Module = require('module');
var Catalog = require('Catalog');
var Api = require('Api');
api = new Module({
url: 'http://example.com', // without trailing slash
username: 'username',
password: 'password'
});
api.Catalog.getCategories();
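For what it's worth, one common way to wire this up (purely an illustrative sketch, not taken from the answer above) is to construct Catalog with a reference to the Api instance, so its methods can reach the parent's get:
lib/catalog.js
function Catalog (api) {
  this.api = api; // keep a reference to the parent Api instance
}
Catalog.prototype.getCategories = function () {
  return this.api.get('categories');
};
module.exports = Catalog;
index.js
var Catalog = require('./lib/catalog.js');
function Api (opt) {
  // set options
  this.Catalog = new Catalog(this); // per instance, so `this.api` is always the configured Api
}
With that in place, api.Catalog.getCategories() resolves against the same instance that was configured with url/username/password.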
I'd like to add a default toDisplay function to all models, which will use metadata (not unlike attribute/association definitions) to manipulate the instance's attributes/associations so they're suitable for display in the UI.
for example:
Foo.findOne(someId)
.exec(function(err, foo) {
...
res.view({
foo: foo.toDisplay(),
});
});
So, I'd like to add this function to all models. I can imagine a
Model.prototype.toDisplay = ...
solution, but I'm not sure where to get Model from (some long require('waterline/..../model') path?), and if I had Model, where to put that snippet.
Please advise.
Model configuration is fully documented here on SailsJS.org. @umassthrower is correct in pointing out that adding an instance method to config/models.js would add it to all of your models; he's also correct in observing that this is not the intended use of the config file.
The reason you're finding this a bit more challenging in Sails than Rails is that Ruby has real classes and inheritance, and Javascript just has objects. One fairly clean way to simulate inheritance and extend your model objects from a "base" object would be to use something like Lodash's _.merge function. For example you could save your base model in lib/BaseModel.js:
// lib/BaseModel.js
module.exports = {
attributes: {
someAttribute: 'string',
someInstanceFunction: function() {
// do some amazing (synchronous) calculation here
}
}
};
Then in your model file, require lodash and use _.merge:
// api/models/MyModel.js
var _ = require('lodash');
var BaseModel = require("../../lib/BaseModel.js");
module.exports = _.merge({}, BaseModel, {
attributes: {
someOtherAttribute: 'integer'
}
});
The attributes from your base model will be merged with MyModel, with MyModel taking precedence.
Setting the first argument to the empty object {} is important here; _.merge is destructive for the first object passed in, so if you just did _.merge(BaseModel, {...}) then the base model would be modified.
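A tiny illustration of that destructive behavior (throwaway objects, just for demonstration):
var _ = require('lodash');
var base = { a: 1 };
_.merge(base, { b: 2 });                // base is mutated in place: { a: 1, b: 2 }
var safe = _.merge({}, base, { c: 3 }); // base is untouched; safe is a fresh object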
Also, remember to npm install lodash!
In Sails 0.x, when the moduleloader was loaded, you could access sails.models directly, but in 1.x it is not ready yet at that point. So, my solution was to create a custom hook that wraps the loadModels function of sails.modules. This may not be the best solution, but it works for me. @adam-pietrasiak, I hope this works for you too :) I am also super lazy when it comes to repeating code.
// provide this code in api/hooks/overrides.js or use another name, who cares
const _ = require('lodash');
module.exports = function (sails) {
return {
defaults: {},
savedModelLoad: null,
configure: function () {
// keep a bound reference to the original loader
this.savedModelLoad = this.savedModelLoad || sails.modules.loadModels.bind(sails.modules);
// bind the override so `this` still points at this hook when Sails calls it
sails.modules.loadModels = this.loadModelsAndApplyOverrides.bind(this);
},
loadModelsAndApplyOverrides: function(cb){
this.savedModelLoad(function (err, models) {
const newModels = _.map(models, applyModelOverrides);
cb(err, newModels);
});
}
};
};
function applyModelOverrides(model) {
return _.merge(model, {
// do your custom stuff here
attributes: {
someAttribute: 'string',
someInstanceFunction: function() {
// do some amazing (synchronous) calculation here
}
}
});
}