I'm trying to dynamically export modules. I'm close but can't figure out how to fix my syntax.
Hard coded:
// index.js inside folder 'models'
const { User } = require('./User');
const { Token } = require('./Token');
const { Session } = require('./Session');
module.exports = {
  User,
  Token,
  Session,
};
Dynamically coded (doesn't work):
// index.js inside folder 'models'
const fs = require('fs');
const path = require('path');
module.exports = () => {
  fs.readdirSync(__dirname).forEach((file) => {
    if (file === 'index.js') return false;
    const fullName = path.join(__dirname, file);
    if (file.toLowerCase().indexOf('.js')) {
      // I think this somehow needs to be destructured, like
      // `return {require(fullName)}` or
      // `require(fullName)[fullName]`
      require(fullName);
    }
  });
};
Elsewhere in my code, I initialize it based on the folder name:
// server.js
require('./models')();
Your dynamic export will not work because you are not returning anything from the exported function.
Try this code as your dynamic model export file:
// index.js inside folder 'models'
const fs = require('fs')
const path = require('path')

const models = {}

fs.readdirSync(__dirname)
  .filter(file => file !== 'index.js')
  .forEach(file => {
    const fullName = path.join(__dirname, file)
    if (file.toLowerCase().endsWith('.js')) {
      // Removes '.js' from the property name in the 'models' object
      const [filename] = file.split('.')
      // Assumes each file exports a property named after the file,
      // e.g. User.js exports { User }
      models[filename] = require(fullName)[filename]
    }
  })

module.exports = models
This approach no longer exports a function, so your require in server.js should now look like this:
// server.js
require('./models');
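With that in place, you can destructure models directly wherever you need them. A minimal usage sketch, assuming each model file exports a property named after the file as in the hard-coded version:

// server.js
const models = require('./models');

// The keys mirror the file names in the models folder
console.log(Object.keys(models)); // e.g. [ 'User', 'Token', 'Session' ]

const { User, Token, Session } = models;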
Related
Currently it's working using hash routing. I want to remove the hash for SEO reasons; is that possible?
It creates routes like:
https://mainDomain/program ← program build run
https://mainDomain/program/#detail ← program routing
I want https://mainDomain/program/#detail to become https://mainDomain/program/detail.
If I use BrowserRouter, the server responds that no such directory exists.
This is my build/deploy structure in AWS (screenshot omitted).
This might be helpful for you: you can create one Node server that serves your project builds.
let path = require("path");
let fsp = require("fs/promises");
let express = require("express");

let isProduction = process.env.NODE_ENV === "production";

async function createServer() {
  let app = express();

  /**
   * @type {import("vite").ViteDevServer}
   */
  let vite;

  if (!isProduction) {
    vite = await require("vite").createServer({
      root: process.cwd(),
      server: { middlewareMode: "ssr" },
    });
    app.use(vite.middlewares);
  } else {
    app.use(require("compression")());
    app.use(express.static(path.join(__dirname, "dist")));
  }

  app.use("*", async (req, res) => {
    let url = req.originalUrl;

    // Use a separate HTML file for the "Inbox" app.
    let appDirectory = url.startsWith("/inbox") ? "inbox" : "";
    let htmlFileToLoad;
    if (isProduction) {
      htmlFileToLoad = path.join("dist", appDirectory, "index.html");
    } else {
      htmlFileToLoad = path.join(appDirectory, "index.html");
    }

    try {
      let html = await fsp.readFile(
        path.join(__dirname, htmlFileToLoad),
        "utf8"
      );
      if (!isProduction) {
        html = await vite.transformIndexHtml(req.url, html);
      }
      res.setHeader("Content-Type", "text/html");
      return res.status(200).end(html);
    } catch (error) {
      if (!isProduction) vite.ssrFixStacktrace(error);
      console.log(error.stack);
      return res.status(500).end(error.stack);
    }
  });

  return app;
}

createServer().then((app) => {
  app.listen(3000, () => {
    console.log("HTTP server is running at http://localhost:3000");
  });
});
For extra information, you can refer to this link.
I have a complicated task to do. I need to separate my sequelize models in separate folders inside the models folder, just like this structure:
├── node_modules
├── src
|   └── models
|       ├── settings
|       |   ├── user.js
|       |   └── location.js
|       ├── stock
|       |   ├── stock.js
|       |   └── products.js
|       └── index.js
Today, I have all models in the models root folder, together with index.js, so that I can call every model from controllers as
const { users, stock } = require("../../models")
The code inside index.js is like this:
"use strict"
const fs = require("fs")
const path = require("path")
const Sequelize = require("sequelize")
const basename = path.basename(__filename)
const env = process.env.NODE_ENV || "development"
const envConfigs = require("../config/config")
const config = envConfigs[env]
const db = {}
let sequelize
if (config.url) {
sequelize = new Sequelize(config.url, config)
} else {
sequelize = new Sequelize(
config.database,
config.username,
config.password,
config
)
}
fs.readdirSync(__dirname)
.filter((file) => {
return (
file.indexOf(".") !== 0 && file !== basename && file.slice(-3) === ".js"
)
})
.forEach((file) => {
const model = require(path.join(__dirname, file))(
sequelize,
Sequelize.DataTypes
)
db[model.name] = model
})
Object.keys(db).forEach((modelName) => {
if (db[modelName].associate) {
db[modelName].associate(db)
}
})
db.sequelize = sequelize
module.exports = db
This was automatically created using sequelize-cli.
I think I need some kind of search inside index.js that concatenates folders and files into a model variable, so that I can use the models like "model.settings.user" to define associations or call the objects in controllers.
Finally, I was able to get the index.js working properly! Here is the code for anyone who needs it. With this in index.js, it will search all the folders and load every model file it finds:
"use strict"
const fs = require("fs")
const path = require("path")
const Sequelize = require("sequelize")
const basename = path.basename(__filename)
const env = process.env.NODE_ENV || "development"
const envConfigs = require("../config/config")
const config = envConfigs[env]
const db = {}
let sequelize
if (config.url) {
sequelize = new Sequelize(config.url, config)
} else {
sequelize = new Sequelize(
config.database,
config.username,
config.password,
config
)
}
const files = []
const sortDir = (maniDir) => {
const folders = []
const CheckFile = (filePath) => fs.statSync(filePath).isFile()
const sortPath = (dir) => {
fs.readdirSync(dir)
.filter((file) => file.indexOf(".") !== 0 && file !== "index.js")
.forEach((res) => {
const filePath = path.join(dir, res)
if (CheckFile(filePath)) {
files.push(filePath)
} else {
folders.push(filePath)
}
})
}
folders.push(maniDir)
let i = 0
do {
sortPath(folders[i])
i += 1
} while (i < folders.length)
}
sortDir(__dirname)
files.forEach((file) => {
const model = require(file)(sequelize, Sequelize.DataTypes)
db[model.name] = model
})
Object.keys(db).forEach((modelName) => {
if (db[modelName].associate) {
db[modelName].associate(db)
}
})
db.sequelize = sequelize
module.exports = db
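With this index.js in place, controllers can keep requiring models exactly as before. One thing to note: models are keyed by model.name, not by folder, so settings/user.js ends up as db.user rather than db.settings.user. A minimal usage sketch (the controller path, model names, and the user/location association are illustrative, not from the original post):

// src/controllers/userController.js (hypothetical path)
const { user, location } = require("../models")

async function listUsers(req, res) {
  // Standard Sequelize query; associations wired up in `associate` work here,
  // assuming a user-to-location association has been defined
  const users = await user.findAll({ include: [location] })
  res.json(users)
}

module.exports = { listUsers }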
For my Angular 9 project I must set up server-side rendering (SSR), and I followed the official tutorial https://angular.io/guide/universal. At the beginning I had the problem "window is not defined", so I decided to set up SSR with domino and followed another tutorial. But now I have a problem when building the project: "elements is not defined" (const elements = stripe.elements() throws "Cannot read property elements of undefined").
Below is my server.ts code:
import 'zone.js/dist/zone-node';
import { ngExpressEngine } from '@nguniversal/express-engine';
import * as express from 'express';
import { join } from 'path';
import { APP_BASE_HREF } from '@angular/common';
import { existsSync } from 'fs';
import * as core from 'express-serve-static-core';

const domino = require('domino');
const fs = require('fs');
const path = require('path');

// Use the browser index.html as template for the mock window
const template = fs
  .readFileSync(path.join(join(process.cwd(), 'dist/captn-boat-angular/browser'), 'index.html'))
  .toString();

// Shim for the global window and document objects.
const window = domino.createWindow(template);
global['window'] = window;
global['document'] = window.document;
global['navigator'] = window.navigator;
global['screen'] = window.screen;
global['branch'] = null;
global['object'] = window.object;
global['localStorage'] = window.localStorage;
global['elements'] = null;
global['Event'] = null;
global['KeyboardEvent'] = null;
global['stripe'] = window.stripe;
window.screen = { deviceXDPI: 0, logicalXDPI: 0 };
global['MouseEvent'] = window.MouseEvent;

declare interface Window {
  Stripe: any; // Or you can define a type for that in this file as well
  stripe: null;
  elements: null;
}

import { AppServerModule } from './src/main.server';

// The Express app is exported so that it can be used by serverless Functions.
export function app(): core.Express {
  const server = express();
  const distFolder = join(process.cwd(), 'dist/captn-boat-angular/browser');
  const indexHtml = existsSync(join(distFolder, 'index.original.html')) ? 'index.original.html' : 'index';

  // Our Universal express-engine (found @ https://github.com/angular/universal/tree/master/modules/express-engine)
  server.engine('html', ngExpressEngine({
    bootstrap: AppServerModule,
  }));

  server.set('view engine', 'html');
  server.set('views', distFolder);

  // Example Express Rest API endpoints
  // server.get('/api/**', (req, res) => { });

  // Serve static files from /browser
  server.get('*.*', express.static(distFolder, {
    maxAge: '1y'
  }));

  // All regular routes use the Universal engine
  server.get('*', (req, res) => {
    res.render(indexHtml, { req, providers: [{ provide: APP_BASE_HREF, useValue: req.baseUrl }] });
  });

  return server;
}

function run() {
  const port = process.env.PORT || 4000;

  // Start up the Node server
  const server = app();
  server.listen(port, () => {
    console.log(`Node Express server listening on http://localhost:${port}`);
  });
}

// Webpack will replace 'require' with '__webpack_require__'
// '__non_webpack_require__' is a proxy to Node 'require'
// The below code is to ensure that the server is run only when not requiring the bundle.
declare const __non_webpack_require__: NodeRequire;
const mainModule = __non_webpack_require__.main;
const moduleFilename = mainModule && mainModule.filename || '';
if (moduleFilename === __filename || moduleFilename.includes('iisnode')) {
  run();
}

export * from './src/main.server';
And then the error: "elements of undefined".
Thank you for your answer.
Do you use Stripe everywhere in your application? I'd use it only in the payment section of the related module and connect it to lazy loading. That way you could use one of the Stripe importers,
like here: ngx-stripe installation
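As a complement: the root cause is that window.Stripe only exists in a real browser, so the domino shim leaves stripe undefined and stripe.elements() throws during SSR. A minimal sketch of guarding the Stripe setup so it only runs client-side (plain JS for brevity; in Angular you would typically check isPlatformBrowser instead, and the key below is a placeholder):

// Hypothetical guard around the Stripe setup
let stripe = null;
let elements = null;

if (typeof window !== 'undefined' && window.Stripe) {
  // Stripe.js is only available in the browser, never during SSR
  stripe = window.Stripe('pk_test_xxx'); // placeholder publishable key
  elements = stripe.elements();
}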
I want to use koa-views with Koa and koa-router with Next.js. In previous projects I had no issues with Express, but in this project I have to use Koa. Using its router, I want to render a page: /some/page/:id. Following the usual Next.js way:
router.get('/some/page/:id', async (ctx, next) => {
  const actualPage = '/some/page/id' // id.js (not actual name 😝)
  await ctx.render(actualPage, { /* could pass object */ })
});
That would work if I were using Express. With Koa:
const Koa = require('koa');
const views = require('koa-views');
// const render = require('koa-views-render'); <-- what's this?

[..] // Making things short here

const server = new Koa();
const router = new Router();

// My issue: I'm seeing tutorials using other engines (.ejs etc.),
// but I'm not using any template engine, I only have .js files
server.use(views(__dirname + "/pages", { extension: 'js' }));
Using the same router.get... function as above, I get:
Error: Engine not found for the ".js" file extension
When I go to /some/page/123, I'd expect it to render the file /pages/some/page/id.js. How?
It turns out I do not need any extra modules to achieve this 🙀
Create a function called, e.g., routes, then pass app and router as parameters:
const routes = (router, app) => {
  router.get('/some/page/:id', async (ctx) => {
    const { id } = ctx.params // available if the page needs it
    const actualPage = '/some/page/id'
    // Render the page with Next.js
    await app.render(ctx.req, ctx.res, actualPage, { foo: 'Bar' })
  })
}

module.exports = routes
Inside your server.js file:
// const routes = require('./routes');
// const app = next({ dev }); // import other modules for this section
// app.prepare().then(() => {
//   const router = new Router();
//   [..]
//   routes(router, app)
// })
The commented-out section is a slimmed-down version to show where things should go.
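For reference, a fuller sketch of that server.js might look like this (a minimal sketch, assuming koa-router and the standard Next.js custom-server API; anything not shown in the slimmed version above is an assumption):

// server.js (sketch)
const Koa = require('koa');
const Router = require('koa-router');
const next = require('next');
const routes = require('./routes');

const dev = process.env.NODE_ENV !== 'production';
const app = next({ dev });
const handle = app.getRequestHandler();

app.prepare().then(() => {
  const server = new Koa();
  const router = new Router();

  // Custom routes defined in routes.js
  routes(router, app);

  // Everything else (static assets, other pages) goes to Next.js
  router.all('(.*)', async (ctx) => {
    await handle(ctx.req, ctx.res);
    ctx.respond = false;
  });

  server.use(router.routes());
  server.listen(3000, () => console.log('> Ready on http://localhost:3000'));
});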
I have a utility module that creates an instance of a multer-gridfs storage engine for uploading files to my Mongo database. I use this module inside any API route that needs to upload files.
I need to be able to update the metadata property value with a unique identifier. More than likely this will be the mongoose _id of the user uploading the file, but for now I am not concerned with that aspect of it. I really just want to know if I can change the metadata property dynamically.
Here is the storage engine gridFs_upload_engine.js:
const mongoose = require('mongoose');
const path = require('path');
const crypto = require('crypto');
const multer = require('multer');
const GridFsStorage = require('multer-gridfs-storage');
const Grid = require('gridfs-stream');

//Init Upload Engine
let gfs;

//Global instance of the DB connection
const database = mongoose.connection;
const mongoDb = process.env.MONGODB_URI || process.env.MLAB_URL;

database.once('open', () => {
  //Init Stream
  gfs = Grid(database.db, mongoose.mongo);
  gfs.collection('uploads');
});

//Create Storage Engine
const storage = new GridFsStorage({
  url: mongoDb,
  // Note: the first argument here is the incoming request, not a response
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        const filename = buf.toString('hex') + path.extname(file.originalname);
        const fileInfo = {
          filename: filename,
          bucketName: 'uploads',
          metadata: 'NEED TO UPDATE THIS'
        };
        resolve(fileInfo);
      });
    });
  }
});

const uploadEngine = multer({ storage });

module.exports = {
  uploadEngine,
  gfs
};
Above you can see the metadata property that I need to be able to dynamically change with some undetermined unique identifier. Is it possible to do that with an exported file?
Here is how I am utilizing it inside of an API route:
const express = require('express');
const router = express.Router();

//Controllers
const upload_controller = require('../../controllers/uploader');

//Utilities
const upload = require('../../utils/gridFs_upload_engine');
const { uploadEngine } = upload;

//Upload Single File
router.post(
  '/single',
  uploadEngine.single('file'),
  upload_controller.upload_single_file
);

//Upload Multiple Files
//Max file uploads at once set to 30
router.post(
  '/multiple',
  uploadEngine.array('file', 30),
  upload_controller.upload_multiple_files
);

module.exports = router;
I pass the uploadEngine into the API route here so that the route controller can use it, and that works with no issue. I am just having quite a time trying to figure out how to update the metadata dynamically, and I am leaning towards my current implementation not allowing for that.
I don't know much about Node and have no idea what multer-gridfs is, but I can answer "How can I pass options into an imported module?"
You can export a function that returns another function, and you would import it like this:
const configFunction = require('nameoffile')
// this returns a function with the config you want
const doSomethingDependingOnTheConfig = configFunction({ ...someConfig })
And in the file you are importing from, you would have a function returning another function, like this:

const configFunction = ({ ...someConfig }) => (your, func) => {
  // do what you want depending on the config
}

module.exports = configFunction
I know this doesn't answer your question the way you want, but it answers your question's title, and I hope it gives you a better understanding of how to do what you want.
If this doesn't help, just let me know.
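Applied to the storage engine in the question, that factory pattern might look like the following sketch (the createUploadEngine name and its metadata parameter are illustrative, not part of the multer-gridfs-storage API):

// gridFs_upload_engine.js (factory variant, sketch)
const crypto = require('crypto');
const path = require('path');
const multer = require('multer');
const GridFsStorage = require('multer-gridfs-storage');

const createUploadEngine = (metadata) => {
  const storage = new GridFsStorage({
    url: process.env.MONGODB_URI,
    file: (req, file) =>
      new Promise((resolve, reject) => {
        crypto.randomBytes(16, (err, buf) => {
          if (err) return reject(err);
          resolve({
            filename: buf.toString('hex') + path.extname(file.originalname),
            bucketName: 'uploads',
            metadata, // supplied by the caller instead of hard-coded
          });
        });
      }),
  });
  return multer({ storage });
};

module.exports = createUploadEngine;

A route could then build its own engine at definition time, e.g. createUploadEngine({ source: 'api' }).single('file').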
You would need to pass a parameter to the module gridFs_upload_engine.js and do the magic there.
An example could be:
In gridFs_upload_engine.js file:
function uploadEngine (id, file) {
// update what you want
}
module.exports = {
...
uploadEngine: uploadEngine
}
In your router:
const upload = require('../../utils/gridFs_upload_engine')
...
router.post('/single/:id', function(req, res, next) {
  ...
  upload.uploadEngine(req.params.id, file)
  next()
}, upload_controller.upload_single_file)
In other words, when you are exposing gfs and uploadEngine inside your module, you could instead expose a function that would receive the arguments needed to perform the upload.
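Alternatively, since the file callback in multer-gridfs-storage receives the incoming Express request as its first argument, the metadata can be computed per request without exposing a new function at all. A minimal sketch (the uploadedBy field and the use of req.params.id from a /single/:id route are illustrative; multer-gridfs-storage also accepts a plain object instead of a promise here):

// Inside gridFs_upload_engine.js: a request-aware file callback, sketch
const storage = new GridFsStorage({
  url: process.env.MONGODB_URI,
  file: (req, file) => ({
    filename: file.originalname,
    bucketName: 'uploads',
    // Hypothetical: taken from the route (/single/:id) or auth middleware
    metadata: { uploadedBy: req.params.id },
  }),
});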