Iterate over folder and import index.js - javascript

I have a create-react-app code base in which I would like to iterate over a nested folder structure to import one specific file.
I have the following structure:
root.js
modules/
  mod1/
    index.js
  mod2/
    index.js
In root.js I would like to go over every module in modules and import its index.js so that the initialization code runs at the start of the application. It's unclear to me what the best way to do this is, preferably without using any plugins if a plugin-free solution exists.

In my opinion, you should include them "manually":
// root.js
require('./modules/mod1')
require('./modules/mod2')
// ...
It's clearer and more direct, unless you have 100+ modules.
EDIT for dynamic import:
No-dependencies proposal (a variation of https://gist.github.com/kethinov/6658166#gistcomment-1603591):
'use strict'
const fs = require('fs')

// Recursively collect every file path under `dir`
const walkSync = function (dir, filelist) {
  const files = fs.readdirSync(dir)
  filelist = filelist || []
  files.forEach(function (file) {
    if (fs.statSync(dir + '/' + file).isDirectory()) {
      filelist = walkSync(dir + '/' + file, filelist)
    } else {
      filelist.push(dir + '/' + file)
    }
  })
  return filelist
}

const allFiles = walkSync('./src')
allFiles.filter(f => f.split('/').pop() === 'index.js').forEach(f => require(f))
One-dependency proposal: Get all files recursively in directories NodejS

Turns out this was simple:
Export everything in a modules.js file.
const req = require.context('./', true, /^\.\/[a-zA-Z0-9]+\/index.js$/);
const modules = req.keys().map(req);
module.exports = modules;
Then import the modules.js file in some root.js file.
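Note that require.context is a webpack feature (which a create-react-app build provides), so the matching happens at bundle time. For completeness, a minimal sketch of the consuming side, assuming modules.js lives inside the modules folder:
// root.js — requiring the aggregate file evaluates every matched index.js once,
// so each module's initialization side effects run at application start
const modules = require('./modules/modules');

console.log(`Initialized ${modules.length} module(s)`); // hypothetical sanity check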


How to use a custom made library

I have an AWS dependency layer in a folder nodejs/.
It contains node_modules/ and a package.json with npm dependencies.
I created a folder called utils/ and my file is util.js.
Since it's a layer on AWS, I import it using const utils = require('/opt/nodejs/utils/util'); in my app.js.
The problem is that my test cases started failing with Cannot find module '/opt/nodejs/utils/util' from 'backend/lambdas/cars/app.js'.
How can I fix my test case?
const app = require('./app');

describe('lambda', function () {
  it('something', async () => {
    const response = await app.lambdaHandler();
    ....
  });
});
app.js
const httpStatusCode = require('http-status-codes');
const cors = require('/opt/nodejs/utils/util');

exports.lambdaHandler = async (event) => {
  return {
    statusCode: httpStatusCode.OK
  };
};
PS: This nodejs folder is on the same level as the lambdas folder
You should import it with a relative path instead, like const utils = require('../nodejs/utils/util') (adjust the ../ segments so the path matches where app.js sits relative to the nodejs folder).
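If you need to keep the absolute /opt/nodejs/... path for the deployed Lambda, another option (not from the original answer; it assumes the tests run under Jest, which the "Cannot find module ... from ..." error format suggests) is to map the layer path onto the local folder in the test configuration:
// jest.config.js — sketch: rewrite the Lambda layer's absolute import path
// to the local source folder while tests run. The '<rootDir>/nodejs/$1'
// target is an assumption about where the layer code lives in this repo.
module.exports = {
  moduleNameMapper: {
    '^/opt/nodejs/(.*)$': '<rootDir>/nodejs/$1',
  },
};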

ENOENT: no such file or directory, open './data/databases.json'

I am trying to write JSON data from a file contained in the model folder to another file inside the data folder; both folders are contained in the server folder. But I keep getting an error message that there is no such file or directory. The code below is inside the model/model.ts file, and the file I want to write to is data/databases.json. I have tried the paths ./data/databases.json and ../data/databases.json but it's still not working. The file is at server/models/model.ts.
let Products = require("../data/product.json") // I was able to import from this file, but I can't write to it using the same path
const fs = require("fs")

const creat = (newInfo: any) => {
  return new Promise((resolve, reject) => {
    let id = companyInfo.length + 1
    let value = { ...newInfo, "id": id }
    companyInfo.push(value)
    fs.writeFileSync('../data/databases.json', JSON.stringify(companyInfo, null, 2))
    resolve(value)
  })
}

export { creat }
I have another file where I call the above function; its path is server/controller/controller.ts. Below is the code:
import { creat } from "../models/model" // importing the function here
import http, { IncomingMessage, Server, ServerResponse } from "http";

const creatCompanyinfo = async (req: IncomingMessage, res: ServerResponse) => {
  let body = ""
  req.on("data", (chunk) => {
    body += chunk.toString()
  })
  req.on("end", async () => {
    let newInfo = JSON.parse(body)
    let result = await creat(newInfo)
    res.setHeader("Content-Type", "application/json")
    res.end(JSON.stringify(result))
  })
}

export { creatCompanyinfo }
And finally the last file, which handles the routing; its path is server/app.ts.
Below is the code:
import http, { IncomingMessage, Server, ServerResponse } from "http";
import { creatCompanyinfo } from "./controller/controller" // importing the function here

const server: Server = http.createServer((req: IncomingMessage, res: ServerResponse) => {
  if (req.url === "/companies" && req.method === "POST") {
    creatCompanyinfo(req, res)
  }
})
This is the path to the JSON file I am writing to: server/data/databases.json. Below is the file structure:
server/
  controller/controller.ts
  data/databases.json
  model/model.ts
  app.ts
Below is the error message I am getting:
Server Running at Port 4000.....
node:fs:585
handleErrorFromBinding(ctx);
^
Error: ENOENT: no such file or directory, open './data/databases.json'
at Object.openSync (node:fs:585:3)
at Object.writeFileSync (node:fs:2170:35)
at /Users/dec/Challenges/week-5-task-stanzealot/server/lib/models/model.js:28:12
at new Promise (<anonymous>)
at Object.creat (/Users/dec/Challenges/week-5-task-stanzealot/server/lib/models/model.js:24:12)
at IncomingMessage.<anonymous> (/Users/dec/Challenges/week-5-task-stanzealot/server/lib/controller/controller.js:44:36)
at IncomingMessage.emit (node:events:539:35)
The behavior of a relative path in require()/import is different from a relative path in fs methods:
an import path is resolved relative to the source file, but an fs path is resolved relative to the current working directory.
Try converting your path to an absolute path:
const path = require("path");

const aPath = path.resolve(__dirname, '../data/databases.json')
fs.writeFileSync(aPath, JSON.stringify(companyInfo, null, 2))
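To see the difference for yourself, a tiny illustration (the printed paths are hypothetical examples):
// __dirname is the directory of the file currently executing;
// process.cwd() is the directory `node` was launched from
console.log(__dirname);     // e.g. /Users/dec/.../server/lib/models
console.log(process.cwd()); // e.g. /Users/dec/.../server (or wherever you ran node)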

Fs file handler for api

So I want to make a routes folder that has subfolders containing routes, but I don't know how to use fs for that...
I only know how to get files in the routes folder itself, not in its subfolders.
Here is my file handler code:
const { readdirSync } = require('fs');

module.exports = function (app) {
  readdirSync(__dirname).forEach(function (file) {
    if (file == "index.js") return;
    var name = file.substr(0, file.indexOf('.'));
    const route = require('./' + name);
    app.get(`/${route.name}`, async (req, res) => {
      route.run(req, res);
    });
  });
};
It gets the files from the routes folder:
-routes
|__index.js
|__route.js
|__route.js
I want to make it so it gets routes from subfolders too:
-routes
|__index.js
|
|__image routes
   |__route1.js
I can't find any help online...
If you do not mind using external libraries, I recommend using glob.
What you want to do can be achieved with one function call:
const glob = require("glob");

glob("**/*.js", { cwd: __dirname }, (error, matches) => {
  if (error) return;
  console.log(matches);
});
This uses glob patterns to match files in a given directory, returning whole paths like image-routes/route1.js or index.js, etc.
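To tie that back to the route loader from the question, here is a sketch using glob's classic callback API (glob v7); it assumes, as in the original code, that each route module exports name and run:
const path = require('path');
const glob = require('glob');

module.exports = function (app) {
  // '**/*.js' also descends into subfolders such as image routes/
  glob("**/*.js", { cwd: __dirname }, (error, matches) => {
    if (error) return;
    matches
      .filter(file => file !== 'index.js') // skip this loader itself
      .forEach(file => {
        const route = require(path.join(__dirname, file));
        app.get(`/${route.name}`, async (req, res) => route.run(req, res));
      });
  });
};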

How do you dynamically module.export all files in a folder?

I'm trying to dynamically export modules. I'm close but can't figure out how to fix my syntax.
Hard coded:
// index.js inside folder 'models'
const { User } = require('./User');
const { Token } = require('./Token');
const { Session } = require('./Session');

module.exports = {
  User,
  Token,
  Session,
};
Dynamically coded (doesn't work):
// index.js inside folder 'models'
const fs = require('fs');
const path = require('path');

module.exports = () => {
  fs.readdirSync(__dirname).forEach((file) => {
    if (file === 'index.js') return false;
    const fullName = path.join(__dirname, file);
    if (file.toLowerCase().indexOf('.js')) {
      // I think this somehow needs to be destructured like
      // `return {require(fullName)}` or
      // `require(fullName)[fullName]` I think
      require(fullName);
    }
  });
};
Elsewhere in my code, I initialize it based on the folder name:
// server.js
require('./models')();
Your dynamic export will not work because you are not returning anything from the exported function.
Try this code as your dynamic model export file:
// index.js inside folder 'models'
const fs = require('fs')
const path = require('path')

const models = {}

fs.readdirSync(__dirname)
  .filter(file => file !== 'index.js')
  .forEach(file => {
    const fullName = path.join(__dirname, file)
    if (file.toLowerCase().endsWith('.js')) {
      // Removes '.js' from the property name in the 'models' object
      const [filename] = file.split('.')
      models[filename] = require(fullName)[filename]
    }
  })

module.exports = models
This approach no longer exports a function, so your require in server.js should now look like this:
// server.js
require('./models');
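Consumers can then pull individual models off the aggregated object; a quick usage sketch reusing the names from the hard-coded version above:
// server.js — destructure whichever models you need
const { User, Token, Session } = require('./models');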

How would I get webpack or an other JS bundler to bundle files remotely hosted?

I have a distributed system and all JS files are exposed through HTTP. So a normal module would look like this:
// http://example.com/path/to/main.js
import * as core from 'http://local.example.com/path/to/core.js';
import * as redux from 'http://cdn.example.com/redux.js#version';

// code

export default {
  ...
}
So each import uses either a resource local to the system or possibly a remotely available resource served from a CDN.
Though when I run webpack, I get this error:
It is trying to parse a locally generated file with this content:
import * as main from 'http://example.com/path/to/main.js';
ERROR in ./src/index.js Module not found: Error: Can't resolve
'http://example.com/path/to/main.js' in '/home/.../index.js'
Is it possible to tell webpack to fetch the URLs and include them inside the bundle? While packaging CDN URLs isn't a big deal for now, I'd be happy if I could simply ignore the ones with a certain URL.
Though being able to bundle all the remotely hosted http:// files would be a good start.
Also, any remote resource linking to other resources should recursively load its remotely linked resources too.
Here's my current webpack config (though nothing much to see here):
const path = require('path');

module.exports = {
  mode: 'development',
  entry: './src/index.js',
  output: {
    filename: 'main.js',
    path: path.resolve(__dirname, 'dist'),
  },
  module: {
    rules: []
  },
};
Edit: after reading a bit, I started writing a resolver but now I'm stuck again:
const path = require('path');
const fetch = require('node-fetch');
const url = require('url');
const fs = require('promise-fs');
const sha1 = require('sha1');

class CustomResolver {
  // Download the requested URL and cache it as a local file under _remote/
  async download_save(request, resolveContext) {
    console.log(request, resolveContext);
    var target = url.parse(request.request);
    var response = await fetch(request.request);
    var content = await response.text();
    try {
      await fs.stat('_remote');
    } catch (exc) {
      await fs.mkdir('_remote');
    }
    var filename = `${sha1(request.request)}.js`;
    var file_path = `_remote/${filename}`;
    await fs.writeFile(file_path, content);
    var abs_path = path.resolve(file_path);
    var url_path = `${target.protocol}://${target.hostname}/`;
    var obj = {
      path: abs_path,
      request: request.request,
      query: '',
    };
    console.log(`${request.request} saved to ${abs_path}`);
    return obj;
  }

  apply(resolver) {
    var self = this;
    const target = resolver.ensureHook("resolved");
    resolver.getHook("module")
      .tapAsync("FetchResolverPlugin", (request, resolveContext, callback) => {
        self.download_save(request, resolveContext)
          .then((obj) => resolver.doResolve(target, obj, resolveContext, callback))
          .catch((err) => {
            console.log(err);
            callback();
          });
      });
  }
}
It does currently fetch URLs starting with https:// but seems to struggle to resolve URLs relative to an http resource. For example:
ERROR in _remote/88f978ae6c4a58e98a0a39996416d923ef9ca531.js
Module not found: Error: Can't resolve '/-/@pika/polyfill@v0.0.3/dist=es2017/polyfill.js' in '_remote/'
 @ _remote/88f978ae6c4a58e98a0a39996416d923ef9ca531.js 25:0-58
 @ _remote/f80b922b2dd42bdfaaba4e9f4fc3c84b9cc04fca.js
 @ ./src/index.js
It doesn't look like it tries to resolve paths relative to already-resolved files. Is there a way to tell the resolver to try to resolve everything?
The main point is: if you have CDN files, you don't need a bundler.
They are already minified and ready to use. Just include the files in the root of your project and call the libraries globally.
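For the narrower wish in the question (ignoring imports with a certain URL rather than bundling them), webpack's externals option also accepts a function. A minimal sketch using webpack 5's function signature; the URL test and the choice of the 'module' external type (which requires emitting an ES module bundle) are assumptions, not part of the original answer:
// webpack.config.js — leave absolute http(s) imports out of the bundle
// and keep them as plain `import` statements in the emitted module
const path = require('path');

module.exports = {
  mode: 'development',
  entry: './src/index.js',
  output: {
    filename: 'main.js',
    path: path.resolve(__dirname, 'dist'),
    module: true, // emit an ES module so URL imports stay valid at runtime
  },
  experiments: { outputModule: true },
  externals: [
    ({ request }, callback) => {
      if (/^https?:\/\//.test(request)) {
        // Keep the URL as an external ES module import instead of resolving it
        return callback(null, 'module ' + request);
      }
      callback(); // bundle everything else normally
    },
  ],
};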
