I'm trying to generate very large JavaScript files with Node. Each one contains const declarations that supply values used throughout the file, values I only know at runtime. Example:
'use strict'
const lib = require('myLib')
const language = lib.getMyLang()
module.exports = {
  "type": lib.getType(language),
  "info": lib.getInfo(language),
  "title": lib.getTitle(language)
}
When I tried to generate it, the values came out as strings:
'use strict'
const lib = require('myLib')
const language = lib.getMyLang()
module.exports = {
  "type": "lib.getType(language)",
  "info": "lib.getInfo(language)",
  "title": "lib.getTitle(language)"
}
The whole script is really large; this specific part looks like:
object[key] = "lib.getType(language)";
How can I generate the files correctly, with the calls emitted as expressions rather than strings?
Try this:
const test = require('./myLib');
const lang = test.language;
const obj = {};
// Walk every exported function and call it with the language,
// using the function name (minus the "get" prefix) as the key
for (const key in test) {
  if (typeof test[key] == 'function') {
    let tempKey = key.toLocaleLowerCase().replace('get', '');
    obj[tempKey] = test[key](lang);
  }
}
module.exports = obj;
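If you actually need to emit source files, note that serializing the object with something like JSON.stringify will always quote the values, which is why the calls come out as strings. A minimal sketch of building the file as a plain template string instead, so the calls stay expressions (the key list and output filename are illustrative):

const fs = require('fs');

// Illustrative key list; each entry becomes a lib.getXxx(language) call
const keys = ['Type', 'Info', 'Title'];
const entries = keys
  .map(k => `  "${k.toLowerCase()}": lib.get${k}(language)`)
  .join(',\n');

const source = `'use strict'
const lib = require('myLib')
const language = lib.getMyLang()
module.exports = {
${entries}
}
`;

fs.writeFileSync('generated.js', source);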
I have a command file for a Discord bot that contains the command plus a piece of parsing logic in a function, and I want to reuse that function within my index.js.
// file: ./commands/scrumPrompt.js
// The function
const extractDeets = function (f, scrum) {
  let items = [];
  let re = new RegExp("(\n[ -]*" + f + ".*)", "g");
  let replace = new RegExp("[ -]*" + f + "[ ]+");
  for (const item of scrum.matchAll(re)) {
    items.push(item[1].trim().replace(replace, ""));
  }
  return items;
};
// The actual command itself within the same file
module.exports = {
  name: "scrum",
  usage: `!scrum < followed by your message > as per Standup format - refer !show for showing the format`,
  description: "Reply to standup prompt",
  async execute(message, args) {
    if (message.channel.type === "text") {
      if (!args.length)
        return message.reply(
          "Please Provide your scrum as per the format in help menu !scrum < your message >"
        );
      else {
        if (message.author.id !== -1) {
          const client = new MongoClient(MONGO_URI);
          try {
            const database = client.db(DB_NAME);
            const members = database.collection("members");
            const query = { user_id: message.author.id };
            const membersdetail = await members.findOne(query);
            if (membersdetail !== null) {
              // since this method returns the matched document, not a cursor, print it directly
              //console.log("Adding Scrum for ", membersdetail.email);
              let userscrum = args.splice(0).join(" ");
              // Check if multiple !scrum commands are present in developer scrum message
              if (userscrum.includes("!scrum") == false) {
                // Expects notations of "-" to exist
                let [f, e, b, o, bl] = ["f", "e", "b", "o", "x"];
                let features = extractDeets(f, userscrum);
                let enhancements = extractDeets(e, userscrum);
                let bugs = extractDeets(b, userscrum);
                let others = extractDeets(o, userscrum);
                let blockers = extractDeets(bl, userscrum);
                // ...
};
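For reference, here is roughly what extractDeets produces on a sample message (the sample text below is illustrative, not from the question):

const sample = "Standup:\n- f implemented login\n- f wrote tests\n- b fixed crash";
// Each "- f ..." line is matched and the "- f " prefix stripped:
console.log(extractDeets("f", sample)); // ["implemented login", "wrote tests"]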
I want to keep the function's name as extractDeets() so that it doesn't break its usage within the command. I'm not completely sure how to export it to index.js, because the file is already being imported here:
// Imports the command file + adds the command to the bot commands collection
for (const file of commandFiles) {
  const command = require(`./commands/${file}`);
  bot.commands.set(command.name, command);
}
I'm unsure how to add the function as another import. Maybe I should move it into a separate file and import it from there? I'm not sure whether that's doable here. I've tried importing it directly, but then the command stops working, which is troublesome.
You can export it alongside the command. Since the file already assigns an object to module.exports, attach the function to that object rather than overwriting it:

module.exports.extractDeets = extractDeets;

Later, you can import it like this:

const { extractDeets } = require('../your_file');
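Since index.js already requires every command file in that loop, you can also reach the function through the commands collection without a second require; a sketch, assuming bot.commands is the usual discord.js Collection:

// In index.js, after the command files have been loaded
const { extractDeets } = bot.commands.get('scrum');
const features = extractDeets('f', someScrumText); // someScrumText is illustrative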
How can I combine these two pieces of code, so that it doesn't just convert CSV to JSON (first code) but also saves the result as a JSON array in an extra file (second code)?
This (first) code converts a CSV file to a JSON array:
const fs = require("fs");
let fileReadStream = fs.createReadStream("myCsvFile.csv");
let invalidLineCount = 0;
const csvtojson = require("csvtojson");
csvtojson({ "delimiter": ";", "fork": true })
  .preFileLine((fileLineString, lineIdx) => {
    let invalidLinePattern = /^['"].*[^"'];/;
    if (invalidLinePattern.test(fileLineString)) {
      console.log(`Line #${lineIdx + 1} is invalid, skipping:`, fileLineString);
      fileLineString = "";
      invalidLineCount++;
    }
    return fileLineString;
  })
  .fromStream(fileReadStream)
  .subscribe((dataObj) => {
    console.log(dataObj);
    // I added the second code here, but it writes only the last object of the array (because of the loop?)
  });
and this (second) code saves the JSON array to an external file:
fs.writeFile('example.json', JSON.stringify(dataObj, null, 4), (err) => {
  if (err) throw err;
});
The question is how to put the second code into the first code (combine them)?
You can use csvtojson's .on('done', (error) => { ... }) method. Push the data into an array in the subscribe handler and write it out as JSON in the 'done' handler (tested successfully).
Check it out:
const fs = require("fs");
const csvtojson = require("csvtojson");

let fileReadStream = fs.createReadStream("username-password.csv");
let invalidLineCount = 0;
let data = [];
csvtojson({ "delimiter": ";", "fork": true })
  .preFileLine((fileLineString, lineIdx) => {
    let invalidLinePattern = /^['"].*[^"'];/;
    if (invalidLinePattern.test(fileLineString)) {
      console.log(`Line #${lineIdx + 1} is invalid, skipping:`, fileLineString);
      fileLineString = "";
      invalidLineCount++;
    }
    return fileLineString;
  })
  .fromStream(fileReadStream)
  .subscribe((dataObj) => {
    // Collect every parsed row instead of writing immediately
    data.push(dataObj);
  })
  .on('done', (error) => {
    if (error) return console.error(error);
    fs.writeFileSync('example.json', JSON.stringify(data, null, 4));
  });
Not sure if you are able to change the library, but I would definitely recommend Papa Parse for this: https://www.npmjs.com/package/papaparse
Your code would then look something like this:
const fs = require('fs'), papa = require('papaparse');
var readFile = fs.createReadStream(file); // file: path to your CSV
papa.parse(readFile, {
  complete: function (results, file) {
    fs.writeFile('example.json', JSON.stringify(results.data), function (err) {
      if (err) console.log(err);
      // callback etc
    });
  }
});
I'm trying to take someone's input, commandName.substring(7), and immediately push context['display-name'] into the array whose name matches that input.
var pokemon = [];
var Grookey = [];
pokemon.push(Grookey);
var Thwackey = [];
pokemon.push(Thwackey);
var Rillaboom = [];
pokemon.push(Rillaboom);

function addPokemon() {
  var adding = commandName.substring(7);
  adding.push(context['display-name']);
}
Is there any way of doing this besides
if (commandName.substring(7) === "Grookey") {
  pokemon[0].push(context['display-name']);
}
This is being used for Twitch chat, where context['display-name'] is the user's display name. For example, !trade Grookey would turn pokemon into [["Phalanx"], [], []].
Use a dictionary (object):
var pokemon = {
  'Grookey': [],
  'Thwackey': [],
  'Rillaboom': []
};

function addPokemon() {
  var adding = commandName.substring(7);
  // adding might be Grookey, Thwackey or Rillaboom...
  pokemon[adding].push(context['display-name']);
}
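One caveat with the lookup: if the substring doesn't match any known key, pokemon[adding] is undefined and .push throws. A small variant (a sketch) that creates the list on demand:

function addPokemon() {
  var adding = commandName.substring(7);
  // Create the array on first use so unknown names don't throw
  if (!pokemon[adding]) pokemon[adding] = [];
  pokemon[adding].push(context['display-name']);
}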
Can you explain why you need an array of arrays? Try using an object like this:
const pokemon = {
  Grookey: [],
  Thwackey: [],
  Rillaboom: []
}
and then
pokemon[commandName.substring(7)].push(context['display-name']);
I'm reading a bunch of translation files in a directory and assigning the data to a global object so that I can pull a translation with i18nContent.messages.en["9999"].
The file tree looks like this:

locales
  messages
    en.json => {"9999": "Unexpected Error", "0": "Success"}
    de.json => {"9999": "German Error", "0": "German Success"}
  emails
    en.json => {"signupEmail": "Thanks for signing up", "passwordEmail": "Password changed"}
    de.json => {"signupEmail": "German signing up", "passwordEmail": "German Password changed"}
I am able to get all of the "names" for each of the nested objects. However, I am unable to assemble the full object during the directory walk.
ATTEMPT 1
global.i18nContent = {};

walkDir(dir, function (filePath, dir) {
  if (filePath.substr(-5) === ".json") {
    let directory = dir.split(/[\s\/]+/);
    directory = directory[directory.length - 1];
    let lang = filePath.split(/[\s\/]+/);
    lang = lang[lang.length - 1].substr(-7, 2);
    // this substr fix is to make this work on Macs
    let rem = __dirname.toString().substr(0, __dirname.toString().length - 3);
    let langFolder = {};
    langFolder[lang] = require(filePath.replace(rem, '../'));
    Object.assign(i18nContent[directory], langFolder);
  }
});
ATTEMPT 2
Removed
let langFolder = {};
langFolder[lang] = require(filePath.replace(rem, '../'));
Object.assign(i18nContent[directory], langFolder);
and just tried i18nContent[directory][lang] = require(filePath.replace(rem, '../')) or i18nContent[directory[lang]] = require(filePath.replace(rem, '../'))
The console output shows ['undefined'].
It should be nested so it can be referenced like i18nContent.messages.en["9999"].
I assume that you are using walkdir, right? Then I would just do it like this:
const walkDir = require('walkdir');

global.i18nContent = {};

const emitter = walkDir(yourDir);

emitter.on('file', filename => {
  // Expect paths like .../locales/<folder>/<lang>.json
  const parts = filename.match(/^.*\/locales\/(\w*)\/(\w*)\.json$/);
  if (!parts) return;
  const [, folderName, lang] = parts;
  if (!global.i18nContent[folderName]) global.i18nContent[folderName] = {};
  global.i18nContent[folderName][lang] = require(filename);
});
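walkdir also emits an 'end' event once the walk finishes, which is a safe point to start reading translations; note that numeric keys need bracket notation. A small usage sketch:

emitter.on('end', () => {
  // "9999" is a numeric key, so dot notation won't work here
  console.log(global.i18nContent.messages.en['9999']); // "Unexpected Error"
});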
I have a text file where each line is separated into 4 categories by colons, and I want to put this into a JSON file where each category is a value under the corresponding name.
Example data.txt file:
Date1:cat1:dog1:bug1
Date2:cat2:dog2:bug2
Date3:cat3:dog3:bug3
Example JSON file:
{
  "Date1": {
    "cat": "cat1",
    "dog": "dog1",
    "bug": "bug1"
  },
  "Date2": {
    "cat": "cat2",
    "dog": "dog2",
    "bug": "bug2"
  },
  ...
}
I've never used JSON before, but I think that's how to format it. How would I split each line using the colons as markers for the next value and store it in the JSON file under the correct name using JavaScript and Node.js?
Use the csv package if you don't want to handle parsing the CSV file yourself.
const fs = require("fs");
const csv = require("csv");

const result = {};
const keys = ["cat", "dog", "bug"];

// Read data
const readStream = fs.createReadStream("yourfile.txt");

// Parser
const parser = csv.parse({ delimiter: ":" });

parser.on("data", (chunk) => {
  // chunk is an array of fields; the first one is the date key
  result[chunk[0]] = {};
  for (let i = 1; i < chunk.length; i++) {
    result[chunk[0]][keys[i - 1]] = chunk[i];
  }
});

parser.on("end", () => {
  console.log(result);
});

readStream.pipe(parser);
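To also save the result to a file, as the question asks, you could replace the console.log in the 'end' handler with a write; a sketch (result.json is an illustrative filename):

parser.on("end", () => {
  // Serialize the assembled object once all lines are parsed
  fs.writeFile("result.json", JSON.stringify(result, null, 2), (err) => {
    if (err) console.error(err);
  });
});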
If your JSON has this defined structure you can go about it with the following code:
import * as fs from 'fs';

/* If you have a large file this is a bad idea; refer to reading from a stream
 * from zhangjinzhou's answer
 */
const file = fs.readFileSync('path/to/data.txt', 'utf8');

const json = file
  .split(/\n|\r\n/)
  .filter(line => line.trim()) // skip empty lines, e.g. a trailing newline
  .map(line => {
    const values = line.split(":");
    let obj = {};
    obj[values[0]] = {
      cat: values[1],
      dog: values[2],
      bug: values[3],
    };
    return obj;
  })
  .reduce((acc, current) => Object.assign(acc, current), {});
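To then write it to disk, as the question asks, one line is enough (data.json is an illustrative filename):

// Serialize and save the assembled object
fs.writeFileSync('data.json', JSON.stringify(json, null, 2));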
Using RegExp and Array#forEach, convert the string to lines, then iterate over them and fill up the object with the corresponding data via:
const dataFileContent =
`Date1:cat1:dog1:bug1
Date2:cat2:dog2:bug2
Date3:cat3:dog3:bug3`;

function processData(data) {
  // convert to lines
  const lines = data.match(/[^\r\n]+/g) || [];
  const object = {};
  // iterate over the lines
  lines.forEach(line => {
    const parts = line.split(':');
    const main = parts.shift();
    const pattern = /^(.*?)(\d+)$/;
    // create an object for each main part
    object[main] = {};
    // fill each main part with the sub parts
    parts.forEach(part => {
      const match = part.match(pattern);
      // match is null when the part has no trailing digits
      if (match) {
        const key = match[1];
        const value = match[2];
        object[main][key] = key + value;
      }
    });
  });
  return object;
}

const processedData = processData(dataFileContent);
console.log(processedData);
Then convert the processedData to JSON by using JSON.stringify and save it to a file via:
const fs = require('fs');
...
// processData
...
const json = JSON.stringify(processedData);
fs.writeFile('my_json_file.json', json, 'utf8', (err) => {
  if (err) throw err;
});
For larger files, consider using streams in Node.js, as suggested in zhangjinzhou's answer.