How to read/write to a JSON file in node.js - javascript

I am fairly new to node.js and I am wondering how to (or even if) I can read and write to a JSON file. I am trying to create an accessible punishment history.
Ideally I would want to be able to create something along the lines of this:
{
"punishments": {
"users": {
"<example user who has a punishment history>": {
"punishment-1567346": {
"punishment-id": "1567346",
"punishment-type": "mute",
"punishment-reason": "<reason>"
},
"punishment-1567347": {
"punishment-id": "1567347",
"punishment-type": "ban",
"punishment-reason": "<reason>"
}
}
}
}
}
Then I would have a way to access the formatted punishment history. I genuinely have no clue where to start.

You can use a NodeJS built-in library called fs to do read/write operations.
Step #1 - Import fs
const fs = require('fs');
Step #2 - Read the file
// Read the raw file contents. Passing 'utf8' makes readFileSync return a
// string instead of a Buffer (JSON.parse accepts both, but being explicit
// about the encoding is clearer and avoids surprises if you log rawdata).
let rawdata = fs.readFileSync('punishmenthistory.json', 'utf8');
// Parse the JSON text into a plain JavaScript object.
let punishments = JSON.parse(rawdata);
console.log(punishments);
Now you can use the punishments variable to check the data inside the JSON File. Also, you can change the data but it only resides inside the variable for now.
Step #3 - Write to the File
// Serialize the (possibly modified) punishments object back to JSON text.
let data = JSON.stringify(punishments);
// Overwrite the history file with the updated JSON.
fs.writeFileSync('punishmenthistory.json', data);
Full code:
const fs = require('fs');

// Load the existing punishment history from disk and parse it into an object.
const rawdata = fs.readFileSync('punishmenthistory.json');
const punishments = JSON.parse(rawdata);
console.log(punishments);

// Serialize the object and write it back over the same file.
const data = JSON.stringify(punishments);
fs.writeFileSync('punishmenthistory.json', data);
References:
https://stackabuse.com/reading-and-writing-json-files-with-node-js/

Use NodeJS File System https://nodejs.org/dist/latest-v14.x/docs/api/fs.html.
Here I have used writeFileSync API to write to file and readFileSync to read from file. Also, when writing don't forget to JSON.stringify(data) because you are writing the data to a JSON file.
const fs = require("fs");
const path = require("path");
// Write Data

// The punishment-history structure we want to persist.
const data = {
  "punishments": {
    "users": {
      "<example user who has a punishment history>": {
        "punishment-1567346": {
          "punishment-id": "1567346",
          "punishment-type": "mute",
          "punishment-reason": "<reason>"
        },
        "punishment-1567347": {
          "punishment-id": "1567347",
          "punishment-type": "ban",
          "punishment-reason": "<reason>"
        }
      }
    }
  }
};

// Resolve the target path once. writeFileSync does NOT create missing
// directories, so make sure the output directory exists first — a missing
// "outputfilepath" directory is the usual cause of ENOENT with this example.
const outFile = path.join(__dirname, "outputfilepath", "outputfile.json");
fs.mkdirSync(path.dirname(outFile), { recursive: true });
fs.writeFileSync(outFile, JSON.stringify(data), "utf8");

// Read data
const rData = fs.readFileSync(outFile, "utf8");
const jsonData = JSON.parse(rData);
Here is the working example,
https://repl.it/repls/OutrageousInbornBruteforceprogramming#index.js

you can do something like this for reading:
const fs = require('fs')
// Read the file at `filePath` and parse it as JSON, reporting the result
// through the node-style callback `cb(error, parsedObject)`. The callback is
// optional — if omitted, results and errors are silently dropped.
function jsonReader (filePath, cb) {
  fs.readFile(filePath, (readError, rawContents) => {
    if (readError) {
      // I/O failure: missing file, permissions, etc.
      return cb && cb(readError)
    }
    try {
      const parsed = JSON.parse(rawContents)
      return cb && cb(null, parsed)
    } catch (parseError) {
      // The file exists but does not contain valid JSON.
      return cb && cb(parseError)
    }
  })
}
// Load customer.json and print the customer's address once it is parsed.
jsonReader('./customer.json', (readError, customer) => {
  if (!readError) {
    console.log(customer.address) // => "Infinity Loop Drive"
  } else {
    console.log(readError)
  }
})
and like this for writing:
const fs = require('fs')
// The record we want to persist as JSON.
const customer = {
  name: "Newbie Co.",
  order_count: 0,
  address: "Po Box City",
}

// Serialize once, then write asynchronously; the callback reports the outcome.
const jsonString = JSON.stringify(customer)
fs.writeFile('./newCustomer.json', jsonString, (writeError) => {
  if (writeError) {
    console.log('Error writing file', writeError)
    return
  }
  console.log('Successfully wrote file')
})

Related

Node.js batch create files to every folders in current directory with loop

I have sub 20 directories inside the current directory. I would like to create 'README.md' for every folder inside the current directory. Here is I've tried so far,
const fs = require('fs').promises;
const path = require('path');
// Intended: create a README.md inside every sub-directory of `filePath`.
// NOTE(review): as written this does NOT do that — see comments below;
// the answer that follows contains the corrected version.
async function readDirectory(filePath) {
try {
const data = await fs.readdir(filePath);
// BUG: writeFile('README.md', file) targets a single README.md in the
// *current working directory* (each iteration overwrites the previous one)
// and uses the entry name `file` as the file CONTENT, rather than creating
// `<folder>/README.md`. Also, forEach ignores the async callback, so the
// writeFile promises are never awaited and rejections are silently lost.
data.forEach(async file => {
fs.writeFile('README.md', file);
});
} catch (err) {
console.log(err.message);
}
}
readDirectory(path.resolve(__dirname));
but the result is not what I'm looking for. Any suggestion?
First writeFile parameters should be full path of file you want to create/write
fs.writeFile( file, data, options, callback )
So I changed your code a little bit
const fs = require('fs');
const path = require('path');
// Create an empty README.md inside every sub-directory of `filePath`.
// Each write reports success or failure through its own callback.
async function readDirectory(filePath) {
  // readdirSync is synchronous — the original `await` in front of it did
  // nothing, so it is dropped (the async signature is kept for callers).
  const entries = fs.readdirSync(filePath);
  // FIX: lstatSync was called with the bare entry name, which only resolves
  // correctly when the process cwd happens to equal `filePath`; join the
  // entry onto filePath instead.
  const folders = entries.filter((item) =>
    fs.lstatSync(path.join(filePath, item)).isDirectory()
  );
  // forEach (not map): we iterate purely for the side effect of writing files.
  folders.forEach((folder) => {
    const subFilePath = path.join(filePath, folder, 'README.md');
    fs.writeFile(subFilePath, '', (err) => {
      if (err) console.log(err);
      else {
        console.log(`File ${subFilePath} created`);
      }
    });
  });
}
readDirectory(path.resolve(__dirname));

How to write to a JSON file from the following code

I want to know how to read/write from/to json files.
const Discord = require ('discord.js');
const client = new Discord.Client();
const {prefix, token,} = require('./config.json');
const fs = require ('fs');
client.login(token)
// On each "<prefix>TC" message, build a one-team JSON string and write it out.
client.on('message', message => {
if(message.content.startsWith(prefix + "TC")) { //TC = team create
// Everything after the 4-character prefix+command is treated as the team
// name. NOTE(review): split(' ').join(' ') is a no-op — slice(4) alone would do.
var args = message.content.split(' ').join(' ').slice(4);
if(!args) return message.channel.send("No")
// Hand-building JSON in a template string; JSON.stringify would be safer
// (a team name containing a double quote breaks this string).
var TeamCreate = `{"NameTeam": "${args}", "ManagerTeam": ${message.author.id}}`
// writeFile replaces the whole file, so each command OVERWRITES the previous
// team instead of appending — that is the behaviour the question asks about.
fs.writeFile("./team.json", TeamCreate, (x) => {
if (x) console.error(x)
})}});
The json file will display :
{"NameTeam": "teste","ManagerTeam": 481117441955463169}
And I would like that each time we place the order, it gets added to the json file.
Example:
1 first order = {"NameTeam": "teste","ManagerTeam": 481117441955463169}
2 second order = {"NameTeam": "teste","ManagerTeam": 481117441955463169}, {"NameTeam": "teste2","ManagerTeam": 1234567890}
From what I understand, you want to make a json file that contains a list of teams.
The easy way to do that is to read and parse the JSON, make changes to it, and then stringify the JSON and update the file. Also, building JSON by hand with strings is really messy and can lead to syntax errors, while in JavaScript turning JS objects into JSON is as simple as doing JSON.stringify(javascriptObject).
Try something like this:
const Discord = require('discord.js');
const client = new Discord.Client();
const { prefix, token, } = require('./config.json');
const fs = require('fs');

client.login(token)

// On each "<prefix>TC" message, append a new team record to ./team.json,
// which holds an array of team objects.
client.on('message', message => {
    if (message.content.startsWith(prefix + "TC")) { //TC = team create
        var args = message.content.split(' ').join(' ').slice(4);
        if (!args) return message.channel.send("No")
        // Build the team as a JS object and let JSON.stringify produce the
        // JSON — safer than hand-writing the string.
        var team = {
            NameTeam: args,
            ManagerTeam: message.author.id
        }
        fs.readFile("./team.json", (err, data) => {
            // FIX: the original aborted on *any* read error, so the very first
            // command (when team.json does not exist yet) never created the
            // file. A missing file (ENOENT) is expected — start from an empty
            // list in that case; only bail out on other I/O errors.
            if (err && err.code !== 'ENOENT') {
                console.log("Error reading json");
                return;
            }
            var parsedJson = [];
            if (!err && data) {
                try {
                    const existing = JSON.parse(data);
                    //Make sure the parsed content is an array
                    if (existing instanceof Array) {
                        parsedJson = existing;
                    }
                } catch (e) {
                    console.log("Couldn't parse json.");
                }
            }
            //Add the newly created team to parsedJson
            parsedJson.push(team);
            //Write file, stringifying the json.
            fs.writeFile("./team.json", JSON.stringify(parsedJson), (err) => {
                if (!err) {
                    console.log("Successfully created team.");
                }
                else {
                    console.log("Error writing to file.")
                }
            });
        });
    }
});
Hope this helps and good luck.

Read Props of Promise of Stream in nodejs

basically what I want to achieve is check in a middleware whether an uploaded file has the correct image type (png for example). This is what I have come up with till now:
// Express-style middleware that buffers the outgoing response body so the
// uploaded file's metadata can be inspected when the response ends.
export const fileCheckMiddleware = (req, res, next) => {
  // Image MIME types we ultimately want to accept.
  // NOTE(review): currently unused — the actual type check is not implemented yet.
  const acceptedImageTypes = ["image/gif", "image/jpeg", "image/png"];

  const oldWrite = res.write;
  const oldEnd = res.end;
  const chunks = [];

  // Intercept res.write to capture each chunk of the outgoing body…
  res.write = (...restArgs) => {
    // FIX: Buffer.from() instead of the deprecated/unsafe `new Buffer()`.
    chunks.push(Buffer.from(restArgs[0]));
    oldWrite.apply(res, restArgs);
  };

  // …and res.end to inspect the complete body once the response finishes.
  res.end = async (...restArgs) => {
    if (restArgs[0]) {
      chunks.push(Buffer.from(restArgs[0]));
    }
    const body = Buffer.concat(chunks).toString("utf8");
    try {
      let parsedBody = {};
      try {
        parsedBody = JSON.parse(body);
      } catch (err) {
        // Non-JSON body — keep the raw text for potential inspection.
        parsedBody = { data: { unparsedBody: body } };
      }
      // NOTE(review): with graphql uploads, `variables.file` is a *Promise*
      // resolving to { filename, mimetype, ... } — see the answer below.
      const { variables } = req.body;
      console.log("\x1b[1m%s\x1b[0m", "LOG variables", variables.file);
      if (variables.file) {
        console.log("\x1b[1m%s\x1b[0m", "LOG type", typeof variables.file);
      }
    } catch (err) {
      // Best-effort inspection only — never block the response on errors here.
    }
    oldEnd.apply(res, restArgs);
  };

  next();
};
The logged type of variables.file is an object. And the result of the console.log is this:
LOG variables Promise {
{ filename: 'trump.jpeg',
mimetype: 'image/jpeg',
encoding: '7bit',
createReadStream: [Function: createReadStream] } }
So how can I access the mimetype here? I tried to map over the keys, variables.file["Promise"],...
Promise is not a key of variables.file, it's the type of variables.file. That means your code starts executing as soon as the HTTP request starts, and the file is received asynchronously, so you have to do something like:
variables.file.then(file => {
// Do whatever you want with the file
next();
});
Or declare the surrounding function as async and do this:
const file = await variables.file;
// Do whatever you want with the file
next();

How can I generate a `V1Job` object for the Kubernetes nodejs API client from a yaml file?

I've done this previously in python using:
with open(path.join(path.dirname(__file__), "job.yaml")) as f:
body= yaml.safe_load(f)
try:
api_response = api_instance.create_namespaced_job(namespace, body)
Looking at source of the nodejs api client:
public createNamespacedJob (namespace: string, body: V1Job, includeUninitialized?: boolean, pretty?: string, dryRun?: string, options: any = {}) : Promise<{ response: http.IncomingMessage; body: V1Job; }> {
How can I generate that the V1Job?
I've tried the below but get back a very verbose error message / response:
const k8s = require('#kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');
// Build the API client from the local kubeconfig.
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);
// NOTE(review): createNamespacedJob expects the namespace as a plain STRING
// ("default", "test123", ...). Passing an object like this is what produces
// the verbose error response — the answer below corrects it.
var namespace = {
metadata: {
name: 'test123',
},
};
try {
// Parse the YAML manifest into a plain JS object; the client accepts this
// in place of a constructed V1Job instance.
var job = yaml.safeLoad(fs.readFileSync('job.yaml', 'utf8'));
k8sApi.createNamespacedJob(namespace, job).then(
(response) => {
console.log('Created namespace');
console.log("Success!")
},
(err) => {
// Rejection handler: log the API error and the job object for debugging.
console.log(err);
console.log(job);
console.log("Err")
},
);
} catch (e) {
// Synchronous failures (file read / YAML parse) land here.
console.log(e);
}
V1Job seems to be an ordinary object so the below worked.
Namespace had to be a string rather than an object...
const k8s = require('#kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);
try {
// NOTE(review): js-yaml v4 removed safeLoad; load() is safe by default there
// (the modified version below makes that change).
var job = yaml.safeLoad(fs.readFileSync('job.yaml', 'utf8'));
// Namespace passed as a string — the key fix over the question's code,
// which passed an object.
k8sApi.createNamespacedJob("default", job).then(
(response) => {
console.log("Success")
},
(err) => {
// BUG: `e` is not in scope in this handler (the catch parameter below is
// scoped to its own block), so a rejection here throws a ReferenceError.
// It should log `err` — the follow-up answer corrects this (Change#2).
console.log(e);
process.exit(1);
},
);
} catch (e) {
console.log(e);
process.exit(1);
}
This is the same as chris-stryczynski's example with 2 slight modifications. Also please note that chris-stryczynski's example with NodeJs-8 results in (at least on my side):
(upon execution of k8sApi.createNamespacedJob)
TypeError [ERR_INVALID_ARG_TYPE]: The "original" argument must be of type function at promisify
This error does not occur with NodeJs-12.
Here is the modified version:
// FIX: the npm package is scoped as @kubernetes/client-node — the '#' in the
// earlier snippets is not a valid module specifier for this package.
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');

const kc = new k8s.KubeConfig();
kc.loadFromDefault(); //You might consider using kc.loadFromFile(...) here
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);

try {
  // js-yaml v4: safeLoad was removed; load() is safe by default. (Change#1)
  var job = yaml.load(fs.readFileSync('job.yaml', 'utf8')); // Change#1 safeLoad->load
  // Namespace must be a plain string; the parsed manifest object stands in
  // for a constructed V1Job instance.
  k8sApi.createNamespacedJob("default", job).then(
    (response) => {
      console.log("Success")
    },
    (err) => {
      console.log(err); // Change#2 e->err — log the actual rejection value
      process.exit(1);
    },
  );
} catch (e) {
  // Synchronous failures: file read or YAML parse errors.
  console.log(e);
  process.exit(1);
}

How to Read a Objects in a .txt Filepath in Javascript When Given the Filepath

I have a filepath that leads to a .txt file that has a number of objects in it. I'm trying to write a JavaScript function that will take in this filepath as an argument and allow me to access and iterate over these objects, but everything I've tried and found online doesn't work. Is there a technique to accomplish this task?
I'm just trying to in vs code. The contents of the .txt file are:
{"food": "chocolate", "eaten", true}
{"food": "hamburger", "eaten", false}
{"food": "peanuts", "eaten", true}
{"food": "potato", "eaten", true}
I tried just iterating over the file path as an argument but that didn't work and it just returned the file path itself, and I have had no luck with any of the read file solutions on this site.
I know in Ruby this is easily accomplishable through:
File.open("my/file/path", "r") do |f|
f.each_line do |line|
puts line
end
end
But I am confused about the JavaScript solution.
const fs = require('fs');
// Normalize and parse the newline-delimited pseudo-JSON records from the
// question's .txt file. Each line looks like {"food": "x", "eaten", true} —
// note the comma after "eaten" where JSON requires a colon — so we repair
// that, join the lines into one array literal, and parse the result.
function parseFoodRecords(raw) {
  const toValidJSON = raw.replace(/"eaten",/g, '"eaten":').replace(/\}[\r\n]+\{/g, '},{');
  const validJSON = `[${toValidJSON}]`;
  return JSON.parse(validJSON);
}

fs.readFile('txtFilePath', 'utf8', (err, data) => {
  // FIX: the original ignored `err`, so a missing file crashed with a
  // TypeError on `data.replace` instead of reporting the real problem.
  if (err) {
    console.error(err);
    return;
  }
  console.log(parseFoodRecords(data));
});
for this question only
In Node.js, if you want a streaming approach, extend a Transform stream to parse JSON between line separators:
const { Transform } = require('stream')
module.exports = class DelimitedJSONTransform extends Transform {
constructor ({ delimiter = '\n', encoding = 'utf8', reviver = null } = {}) {
super({ readableObjectMode: true })
this._delimiter = delimiter
this._encoding = encoding
this._reviver = reviver
this._buffer = ''
}
_transform (chunk, encoding, callback) {
switch (encoding) {
case 'buffer':
this._buffer += chunk.toString(this._encoding)
break
default:
this._buffer += chunk
break
}
const lines = this._buffer.split(this._delimiter)
const latest = lines.pop()
try {
while (lines.length > 0) {
this.push(JSON.parse(lines.shift(), this._reviver))
}
callback()
} catch (error) {
callback(error)
} finally {
lines.push(latest)
this._buffer = lines.join(this._delimiter)
}
}
_flush (callback) {
if (!this._buffer.trim()) {
return
}
const lines = this._buffer.split(this._delimiter)
try {
while (lines.length > 0) {
this.push(JSON.parse(lines.shift(), this._reviver))
}
callback()
} catch (error) {
callback(error)
}
}
}
Usage
const { createReadStream } = require('fs')
const DelimitedJSONTransform = require('./transform') // or whatever you named the file above
// Pipe the raw text file through the transform; each 'data' event then
// delivers one parsed JSON document (object mode).
const fileStream = createReadStream('jsons.txt')
const jsonTransform = fileStream.pipe(new DelimitedJSONTransform())

jsonTransform
  .on('data', (object) => console.log(object))
  .on('error', (error) => console.error(error))

Categories

Resources