Suppose I have two processes like these:
file1.js
let variable1 = "variable1"
file2.js
let variable2 = "variable2"
that have both been spawned using
node file1.js
node file2.js
Is there a way to let them communicate? For example can I get variable1's value from file2.js?
If you create a master Node.js process and fork two child Node.js processes, you can communicate data between parent and child.
Basic example:
const path = require('path');
const fork = require('child_process').fork;

const program = path.resolve('child.js');
const parameters = [];
const options = {
  stdio: ['pipe', 'pipe', 'pipe', 'ipc'] // keep an IPC channel open for send()/on('message')
};

const child = fork(program, parameters, options);
child.on('message', message => {
  console.log('message from child:', message);
  child.send('Hi');
});
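For completeness, a minimal sketch of what the child side could look like (child.js is the name assumed by the parent above); process.send() and the 'message' event are available in the child because it was started with fork():

// child.js, hypothetical counterpart to the parent above
process.on('message', message => {
  console.log('message from parent:', message);
});

// send() exists here only because fork() set up an IPC channel
process.send('Hello from child');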
More:
https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options
I have a problem getting the process directory. I want to spawn a mongod process from an application built with pkg. I tested accessing the directory without the spawn section (e.g. console.log(args[1])) and it returned the process directory. After I uncommented the spawn section, it failed at line 4 with:

ReferenceError: Cannot access 'process' before initialization
const { spawn } = require('child_process');
const { parse } = require('path');

// line 4: this is where the ReferenceError is thrown
const processPath = parse(process.argv[0]);
const processDir = processPath.dir;
const executableName = 'mongod';
const args = [
  '-f', `${__dirname}/configs/mongodb.yml`,
  '--dbpath', `${processDir}/database/data`,
  '--logpath', `${processDir}/database/log/system.log`,
];
const options = {
  cwd: `${processDir}/bin`
};

const process = spawn(executableName, args, options);
process.stdout.on('data', chunk => {
  console.log(chunk.toString());
});
My build directory (the mongod executable is inside bin):

build
build/program.exe
build/bin
build/database
build/database/data
build/database/log/system.log

I separate out the assets that are not included in the real device filesystem; the rest lives inside the snapshot filesystem (pkg's virtual filesystem).
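For what it's worth, a minimal sketch of the likely fix (my assumption, not part of the original post): const process = spawn(...) creates a module-level binding named process that shadows the Node global throughout the file, so the process.argv reference on line 4 is evaluated inside the temporal dead zone. Renaming the spawn handle avoids the shadowing:

const { spawn } = require('child_process');
const { parse } = require('path');

// `process` refers to the Node global again, so argv is accessible
const processDir = parse(process.argv[0]).dir;

// any name other than `process` avoids the temporal dead zone error
const mongod = spawn('mongod', ['-f', `${__dirname}/configs/mongodb.yml`], {
  cwd: `${processDir}/bin`
});
mongod.stdout.on('data', chunk => console.log(chunk.toString()));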
For my project I created a log module, and right now I create a new instance of it in every module so that each one can log to the CLI with the right syntax, color configuration, etc.

For example (a simplified example):
// index.js
const { Log, Ansi } = require("./class/log.js");
const Tool = require("./class/tool.js");
const argv = require("yargs").argv;

let log = new Log({
  levelIcon: true,
  powerlineRoot: {
    name: "root",
    backgroundColor: Ansi.BLACK_BRIGHT,
    text: "myappName"
  }
});

let tool = new Tool(argv.toolName, argv.envName);

tool.run().then(() => {
  log.print("Tool is running", "info");
}).catch((err) => {
  log.print(err, "critical");
});
// tool.js
const { Log, Ansi } = require("./log.js");

class Tool {
  // class field: every Tool instance builds its own Log, which is the duplication in question
  log = new Log({
    levelIcon: true,
    powerlineRoot: {
      name: "root",
      backgroundColor: Ansi.BLACK_BRIGHT,
      text: "myappName"
    }
  });

  run() {
    return new Promise((resolve, reject) => {
      resolve();
    });
  }
}

module.exports = Tool;
I am wondering if there is a way to create only one instance in my index.js and share it with the instances of modules like Tool. I don't know if it's possible, but I think sharing one instance of Log would consume less memory than creating multiple ones.

I hope my question is clear enough. Feel free to ask me for more information if needed.
Yes, you can absolutely do that. Since the entry point is index.js, everything ultimately runs as a single program in a single thread, and Node caches a module's exports after the first require, so every file that requires a module gets the same instance. You can create one more module, logger.js, like:
const { Log, Ansi } = require("./class/log.js");

const logger = new Log({
  levelIcon: true,
  powerlineRoot: {
    name: "root",
    backgroundColor: Ansi.BLACK_BRIGHT,
    text: "myappName"
  }
});

module.exports = logger;
Now you can just import logger and use it like:
const logger = require("./logger")
logger.print("hello world!");
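tool.js can then drop its own instance and reuse the shared one (a sketch based on the question's code; the print method is taken from the index.js example):

// tool.js, reusing the shared logger instead of creating a new Log
const logger = require("./logger");

class Tool {
  run() {
    return new Promise((resolve, reject) => {
      logger.print("Tool starting", "info");
      resolve();
    });
  }
}

module.exports = Tool;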
As @jjsingh says, I use a global variable to store the object. I'm not sure it's the best way, but for the moment it resolves my issue.
global.log = new Log({
  levelIcon: true,
  powerlineRoot: {
    name: "root",
    color: {
      background: Ansi.BLUE_SEA,
      foreground: Ansi.RED,
    },
    text: global.conf.get("infos").name
  }
});
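Any module loaded after this can then use the logger directly, without requiring anything (a sketch; global.conf is assumed to be set up elsewhere, as in the original):

// any-other-module.js: uses the instance stored on `global`
global.log.print("hello from another module", "info");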
I am trying to use pino for logging in my Node app server. Some large logs are coming in, so rotating the files every day would be more practical.

So I used pino.multistream() and require('file-stream-rotator').

My code works, but for performance reasons I would rather not run the streams in the main thread. According to the docs, I should use pino.transport():

[pino.multistream()] differs from pino.transport() as all the streams will be executed within the main thread, i.e. the one that created the pino instance.

https://github.com/pinojs/pino/releases?page=2

However, I can't manage to combine pino.transport() and file-stream-rotator.

My code below does not work completely: it logs the first entries, but then blocks the script with the error

throw new Error('the worker has exited')
Main file

const pino = require('pino')

const transport = pino.transport({
  target: './custom-transport.js'
})

const logger = pino(transport)
logger.level = 'info'
logger.info('Pino: Start Service Logging...')

module.exports = {
  logger
}
custom-transport.js file

const { once } = require('events')
const fileStreamRotator = require('file-stream-rotator')

const customTransport = async () => {
  const stream = fileStreamRotator.getStream({ filename: 'myfolder/custom-logger.log', frequency: 'daily' })
  await once(stream, 'open')
  return stream
}

module.exports = customTransport
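For what it's worth, a sketch of one way to bridge the two (an assumption on my part, not a verified fix): a pino transport module exports a function that returns a stream the worker thread can manage, and pino-abstract-transport can wrap the rotating file stream into such a stream:

// custom-transport.js, hypothetical rewrite using pino-abstract-transport
const build = require('pino-abstract-transport')
const fileStreamRotator = require('file-stream-rotator')
const { once } = require('events')

module.exports = async function (opts) {
  const dest = fileStreamRotator.getStream({
    filename: 'myfolder/custom-logger.log',
    frequency: 'daily'
  })
  await once(dest, 'open')

  // parse: 'lines' makes `source` yield the serialized log lines as strings
  return build(async function (source) {
    for await (const line of source) {
      dest.write(line + '\n')
    }
  }, { parse: 'lines' })
}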
I have a logger that I initialize using a constructor in the index.js file. Now I need to pass the logger instance to other files, and I do it like this:
index.js

const books = require("./books");

const logger = initLogger({
  level: levels.error,
  label: "app",
  version: "0.0.1",
});

books(app, logger);

app.listen(port, () => logger.info(`listening on port ${port}`));
Inside the books.js file I use it as follows: I take the logger that index.js passes in, use it inside books.js, and also pass it on to another file with the call isbn.get(books, logger).

Is it recommended to do it like this? Is there a cleaner way in Node?
books.js

const isbn = require("./isbn");

module.exports = async function (app, logger) {
  …
  try {
    let books = await getBooks();
    logger.info("get books process has started");
  } catch (err) {
    logger.error("Failed to fetch books", err);
    return;
  }
  …
  // this function comes from the "isbn" file, and I should pass the logger to it as well
  try {
    let url = await isbn.get(books, logger);
  } catch (e) {
    res.send(e.message);
  }
};
Try creating a module specifically for your logger configuration; then you can import it into your modules instead of relying on a side effect of your business module to create a logger.
This will help if you ever need or want to change your logger configuration: instead of following a chain of business methods, you can just update the log configuration.
Example
logger.js
'use strict';

// Any setup you need can be done here.
// e.g. load log libraries, templates etc.
const log = function(level, message) {
  return console.log(level + ": " + message);
};

module.exports = log;
business-logic.js
'use strict';

const log = require('./logger');
const stuff = require('./stuff');

const do_stuff = function (thing) {
  // do stuff here
  log("INFO", "Did stuff");
};
This is a pretty clean way of doing it; however, it could get awkward when you try to share more variables or add more requires. So you could put all the variables in an object and destructure only the variables you need in books.js:
index.js:
const state = {app, logger, some, other, variables};
require("./books")(state);
require("./another_file")(state);
books.js:
module.exports = async function ({app, logger}) {
};
So I'm planning to separate my functions into separate files and then import them into a single index.js, which then becomes the main exporter. I'm wondering whether having something like var bcrypt = require('bcrypt') in several of my files would be slower than just having it in one file.
Here's how I'm planning to group and export in index.js
const fs = require('fs');
const path = require('path');

const modules = {};
const files = fs.readdirSync(__dirname);

files.forEach(file => {
  if (file === 'index.js') return;
  // merge each sibling file's exports into a single object
  let temp = require(path.join(__dirname, file));
  for (let key in temp) {
    modules[key] = temp[key];
  }
});

module.exports = modules;
As an example of what I mean:
file1.js
var bcrypt = require("bcrypt");
module.exports.file1test = "hi"
file2.js
var bcrypt = require("bcrypt");
module.exports.file2test = "bye"
No, it does not. Whenever a module is required for the first time, the module's code runs, assigns something to its exports, and those exports are returned. Further requires of that module simply reference those exports again. The logic is similar to this:
const importModule = (() => {
  const exports = {};
  return (name) => {
    if (!exports[name]) exports[name] = runModule(name);
    return exports[name];
  };
})();
So multiple imports of the same module are no more expensive than referencing an object multiple times.
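A quick way to see this in action with the two files from the question (a hypothetical check): requiring the same module twice returns the very same exports object, so bcrypt is loaded once no matter how many files require it.

// check.js, hypothetical demonstration of require caching
const a = require('./file1');
const b = require('./file1');
console.log(a === b); // true: same cached exports object
console.log(a.file1test); // "hi"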