Writing to a JSON file writes but doesn't save it - javascript

Is it normal that writing into a JSON file doesn't save it? If so, how do you actually save it once the write is done? After I write into the JSON file and read it again... it comes back unchanged...
module.exports = {
  data: async function() {
    const fs = require('fs');
    let obj;
    fs.readFile('test.json', 'utf8', function readFileCallBack(err, data) {
      if (err) console.log(err);
      else if (data !== undefined) {
        obj = JSON.parse(data);
        obj.table.push({ id: 1, name: 'test' });
        json = JSON.stringify(obj);
        console.log(obj); // { table: [{ id: 1, name: 'test' }] }
        fs.writeFile('test.json', json, readFileCallBack);
      }
    });
    fs.readFile('test.json', 'utf8', function readFileCallBack(err, data) {
      if (err) console.log(err);
      else if (data !== undefined) {
        obj = JSON.parse(data);
        console.log(obj); // { table: [] }
      }
    });
  }
}
My JSON File:
{
  "table": []
}
Is there something I need to do with localstorage.setItem() to actually save it?

Since JS is asynchronous in nature, both of your fs.readFile calls are started without waiting for each other, so the second read can run before the write has finished. You want to update the data that is in the JSON file, and to handle asynchronous code you can use callbacks or promises. Here is an example using promises (async/await):
const fs = require("fs");
const util = require("util");
const defaultObj = {
table: []
};
const readFilePromise = util.promisify(fs.readFile);
const writeFilePromise = util.promisify(fs.writeFile);
async function readFromJSON() {
const data = await readFilePromise('test.json', { encoding: "utf-8" });
return data.toString("utf-8");
}
function writeToJSON(json) {
return writeFilePromise('test.json', json);
}
async function getData() {
let data = await readFromJSON(); // read from JSON file
const obj = data ? JSON.parse(data) : defaultObj;
obj.table.push({ id: 1, name: 'test' });
const json = JSON.stringify(obj);
await writeToJSON(json); // update the data
return obj;
}
module.exports = {
data: getData
};
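For completeness, here is a minimal sketch of how the exported data function might be consumed from another file (the module file name data.js below is an assumption):
// app.js - assumes the module above was saved as data.js next to this file
const { data } = require("./data");

data()
  .then((obj) => {
    // obj is the updated object that was also written back to test.json
    console.log(obj.table);
  })
  .catch((err) => console.error("Failed to update test.json:", err));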

Related

How to read multiple JSON files using fs and bulk requests

I'm using the elasticsearch search engine with my React app. I was reading one file at the backend, as you can see in the code, and it worked perfectly. But now I want to read three different JSON files into three different indexes using the "fs" package and a bulk request. Can you please help me?
the code:
// Start reading the json file
fs.readFile("DocRes.json", { encoding: "utf-8" }, function (err, data) {
  if (err) {
    throw err;
  }
  // Build up a giant bulk request for elasticsearch.
  bulk_request = data.split("\n").reduce(function (bulk_request, line) {
    var obj, ncar;
    try {
      obj = JSON.parse(line);
    } catch (e) {
      console.log("Done reading 1");
      return bulk_request;
    }
    // Rework the data slightly
    ncar = {
      id: obj.id,
      name: obj.name,
      summary: obj.summary,
      image: obj.image,
      approvetool: obj.approvetool,
      num: obj.num,
      date: obj.date,
    };
    bulk_request.push({
      index: { _index: "ncar_index", _type: "ncar", _id: ncar.id },
    });
    bulk_request.push(ncar);
    return bulk_request;
  }, []);
  // A little voodoo to simulate synchronous insert
  var busy = false;
  var callback = function (err, resp) {
    if (err) {
      console.log(err);
    }
    busy = false;
  };
  // Recursively whittle away at bulk_request, 1000 at a time.
  var perhaps_insert = function () {
    if (!busy) {
      busy = true;
      client.bulk(
        {
          body: bulk_request.slice(0, 1000),
        },
        callback
      );
      bulk_request = bulk_request.slice(1000);
      console.log(bulk_request.length);
    }
    if (bulk_request.length > 0) {
      setTimeout(perhaps_insert, 100);
    } else {
      console.log("Inserted all records.");
    }
  };
  perhaps_insert();
});
You can create a promise for each file read and feed the results to the Elasticsearch bulk_request.
const fsPromises = require('fs').promises;
const path = require('path');

const files = ['filename1', 'filename2'];

// Read one file; make sure the path is correct
const fetchFile = (filename) => fsPromises.readFile(path.join(__dirname, filename), 'utf8');

const results = files.map((fileName) => fetchFile(fileName));

Promise.all(results)
  .then((data) => console.log(data))
  .catch((e) => console.log(e));
Once you get the data from all the promises, pass it to Elasticsearch.
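As a rough sketch, once Promise.all resolves you could build one bulk body per file and send each to its own index. The index names below are placeholders, and client is assumed to be the already-configured Elasticsearch client from your snippet:
const indexNames = ["index_one", "index_two", "index_three"]; // hypothetical index names

Promise.all(results)
  .then((fileContents) => {
    fileContents.forEach((raw, i) => {
      // Build a bulk body per file, same as in the single-file version
      const body = raw.toString().split("\n").reduce((bulk, line) => {
        let obj;
        try {
          obj = JSON.parse(line);
        } catch (e) {
          return bulk; // skip empty or malformed lines
        }
        bulk.push({ index: { _index: indexNames[i], _id: obj.id } });
        bulk.push(obj);
        return bulk;
      }, []);
      client.bulk({ body }, (err, resp) => {
        if (err) console.log(err);
      });
    });
  })
  .catch((e) => console.log(e));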

How to read/write to a JSON file in node.js

I am fairly new to node.js and I am wondering how (or even if) I can read and write to a JSON file. I am trying to create an accessible punishment history.
Ideally I would want to be able to create something along the lines of this:
{
  "punishments": {
    "users": {
      "<example user who has a punishment history>": {
        "punishment-1567346": {
          "punishment-id": "1567346",
          "punishment-type": "mute",
          "punishment-reason": "<reason>"
        },
        "punishment-1567347": {
          "punishment-id": "1567347",
          "punishment-type": "ban",
          "punishment-reason": "<reason>"
        }
      }
    }
  }
}
Then I would have a way to access the formatted punishment history. I genuinely have no clue where to start.
You can use the Node.js built-in module fs to do read/write operations.
Step #1 - Import fs
const fs = require('fs');
Step #2 - Read the file
let rawdata = fs.readFileSync('punishmenthistory.json');
let punishments = JSON.parse(rawdata);
console.log(punishments);
Now you can use the punishments variable to inspect the data from the JSON file. You can also change the data, but for now it only lives in that variable.
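For example, here is a small sketch of changing the data in memory before Step #3 writes it back (the user name and punishment id below are made up, and the file is assumed to have the shape shown in the question):
// Add a new punishment entry in memory; nothing is saved until Step #3
const users = punishments.punishments.users;
users["<example user>"] = users["<example user>"] || {};
users["<example user>"]["punishment-1567348"] = {
  "punishment-id": "1567348",
  "punishment-type": "kick",
  "punishment-reason": "<reason>"
};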
Step #3 - Write to the File
let data = JSON.stringify(punishments);
fs.writeFileSync('punishmenthistory.json', data);
Full code:
const fs = require('fs');

let rawdata = fs.readFileSync('punishmenthistory.json');
let punishments = JSON.parse(rawdata);
console.log(punishments);

let data = JSON.stringify(punishments);
fs.writeFileSync('punishmenthistory.json', data);
References:
https://stackabuse.com/reading-and-writing-json-files-with-node-js/
Use NodeJS File System https://nodejs.org/dist/latest-v14.x/docs/api/fs.html.
Here I have used the writeFileSync API to write to the file and readFileSync to read from the file. Also, when writing, don't forget to JSON.stringify(data), because you are writing the data to a JSON file.
const fs = require("fs");
const path = require("path");
// Write Data
const data = {
"punishments": {
"users": {
"<example user who has a punishment history>": {
"punishment-1567346": {
"punishment-id": "1567346",
"punishment-type": "mute",
"punishment-reason": "<reason>"
},
"punishment-1567347": {
"punishment-id": "1567347",
"punishment-type": "ban",
"punishment-reason": "<reason>"
}
}
}
}
};
fs.writeFileSync(path.join(__dirname, "outputfilepath", "outputfile.json"), JSON.stringify(data), "utf8");
// Read data
const rData = fs.readFileSync(path.join(__dirname, "outputfilepath", "outputfile.json"), "utf8");
const jsonData = JSON.parse(rData);
Here is a working example:
https://repl.it/repls/OutrageousInbornBruteforceprogramming#index.js
You can do something like this for reading:
const fs = require('fs')

function jsonReader(filePath, cb) {
  fs.readFile(filePath, (err, fileData) => {
    if (err) {
      return cb && cb(err)
    }
    try {
      const object = JSON.parse(fileData)
      return cb && cb(null, object)
    } catch (err) {
      return cb && cb(err)
    }
  })
}

jsonReader('./customer.json', (err, customer) => {
  if (err) {
    console.log(err)
    return
  }
  console.log(customer.address) // => "Infinity Loop Drive"
})
and like this for writing:
const fs = require('fs')

const customer = {
  name: "Newbie Co.",
  order_count: 0,
  address: "Po Box City",
}

const jsonString = JSON.stringify(customer)
fs.writeFile('./newCustomer.json', jsonString, err => {
  if (err) {
    console.log('Error writing file', err)
  } else {
    console.log('Successfully wrote file')
  }
})
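Putting the two together, here is a minimal read-update-write sketch using the jsonReader helper from above (the customer.json file and its order_count field come from the examples; error handling is kept minimal):
jsonReader('./customer.json', (err, customer) => {
  if (err) {
    console.log(err)
    return
  }
  customer.order_count += 1 // change the object in memory
  fs.writeFile('./customer.json', JSON.stringify(customer), err => {
    if (err) console.log('Error writing file', err)
  })
})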

How can I retrieve the data from Promise object in React?

Here is my code snippet for parsing application data:
async function parseApplication(data: Application) {
  const fieldGroupValues = {};
  for (const group of Object.keys(data.mappedFieldGroupValues)) {
    const groupValue = data.mappedFieldGroupValues[group];
    for (const fieldName of Object.keys(groupValue.mappedFieldValues)) {
      const { fieldValue } = groupValue.mappedFieldValues[fieldName];
    }
  }
  return fieldGroupValues;
}
But I receive the data as a Promise object; how can I retrieve the data from the Promise?
In your example you are combining both await and .then(); I would use only one of them.
Preferably await, as in the following:
try {
  const dict = await getDictionaryByKey(fieldValue.value.entityDefinitionCode);
  const dictItem = dict.find((item) => fieldValue.value.entityId === item.code);
  acc[fieldName] = dictItem ? dictItem.text : fieldValue.value.entityId;
} catch (err) {
  acc[fieldName] = fieldValue.value.entityId;
}
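As a rough sketch, the same await-only pattern inside the async parseApplication from your question could look like this (getDictionaryByKey is assumed to return a Promise, as in your snippet):
async function parseApplication(data: Application) {
  const fieldGroupValues = {};
  for (const group of Object.keys(data.mappedFieldGroupValues)) {
    const groupValue = data.mappedFieldGroupValues[group];
    for (const fieldName of Object.keys(groupValue.mappedFieldValues)) {
      const { fieldValue } = groupValue.mappedFieldValues[fieldName];
      try {
        const dict = await getDictionaryByKey(fieldValue.value.entityDefinitionCode);
        const dictItem = dict.find((item) => fieldValue.value.entityId === item.code);
        fieldGroupValues[fieldName] = dictItem ? dictItem.text : fieldValue.value.entityId;
      } catch (err) {
        fieldGroupValues[fieldName] = fieldValue.value.entityId;
      }
    }
  }
  return fieldGroupValues;
}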

Read Props of Promise of Stream in nodejs

Basically, what I want to achieve is to check in a middleware whether an uploaded file has the correct image type (png, for example). This is what I have come up with so far:
export const fileCheckMiddleware = (req, res, next) => {
  const acceptedImageTypes = ["image/gif", "image/jpeg", "image/png"];
  const oldWrite = res.write;
  const oldEnd = res.end;
  const chunks = [];

  res.write = (...restArgs) => {
    chunks.push(new Buffer(restArgs[0]));
    oldWrite.apply(res, restArgs);
  };

  res.end = async (...restArgs) => {
    if (restArgs[0]) {
      chunks.push(new Buffer(restArgs[0]));
    }
    const body = Buffer.concat(chunks).toString("utf8");
    try {
      let parsedBody = {};
      try {
        parsedBody = JSON.parse(body);
      } catch (err) {
        parsedBody = { data: { unparsedBody: body } };
      }
      const { variables } = req.body;
      console.log("\x1b[1m%s\x1b[0m", "LOG variables", variables.file);
      if (variables.file) {
        console.log("\x1b[1m%s\x1b[0m", "LOG type", typeof variables.file);
      }
    } catch (err) {}
    oldEnd.apply(res, restArgs);
  };

  next();
};
The logged type of variables.file is an object, and the result of the console.log is this:
LOG variables Promise {
{ filename: 'trump.jpeg',
mimetype: 'image/jpeg',
encoding: '7bit',
createReadStream: [Function: createReadStream] } }
So how can I access the mimetype here? I tried to map over the keys, variables.file["Promise"],...
Promise is not a key of variables.file, it's the type of variables.file. That means your code starts executing as soon as the HTTP request starts, and the file is received asynchronously, so you have to do something like:
variables.file.then(file => {
  // Do whatever you want with the file
  next();
});
Or declare the surrounding function as async and do this:
const file = await variables.file;
// Do whatever you want with the file
next();
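For the original goal of rejecting wrong image types, a hedged sketch of the check inside the async res.end override could look like this (how you respond to an invalid type is up to your middleware):
if (variables.file) {
  const file = await variables.file; // resolve the upload promise first
  if (!acceptedImageTypes.includes(file.mimetype)) {
    console.log("Rejected file of type", file.mimetype);
    // handle the invalid type here, e.g. send an error response instead of calling oldEnd
  }
}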

Can't get a correct JSON format, instead I am messing it up

What do I want?
Good question, isn't it? Well...
I am working on an application to calculate budgets with electron-vue.
In my app I try to save the users in a JSON file so that they persist after an application restart.
The JSON file should look like this:
{
  "deniz": {
    "salary": 1234
  },
  "hüseyin": {
    "salary": 4321
  }
}
What do I get?
I am getting this instead:
{
  "deniz": {
    "salary": 1234
  }
}{
  "hüseyin": {
    "salary": 4321
  }
}
The problem is, this is invalid JSON: a whole new object is written right after the existing one.
How am I doing this?
I created a userDataControllerMixin.js to separate the logic from the component itself.
I have two input fields in my component, 1. userName and 2. userSalary, to collect the user data.
Inside my userDataControllerMixin.js:
export const userDataControllerMixin = {
  data() {
    return {
      userDataAbsPath: 'src/data/userData.json',
    };
  },
  mounted() {
    this.getUsers();
  },
  methods: {
    // FETCH THE userData.json
    getUsers() {
      const fs = require('fs');
      const loadJSON = fs.readFile('src/data/userData.json', 'utf8', (err, data) => {
        if (err) {
          console.log(`failed to read file: ${err}`);
        }
        // console.log(data);
      });
      return loadJSON;
    },
    // USING THIS CONSTRUCTOR TO BUILD A JSON FORMAT
    User(user, salary) {
      this[user] = {
        salary: Number(salary),
      };
      return user;
    },
    // GET INPUT FROM USERS INPUTBOX
    getInput(inputName, inputSalary) {
      const userName = this.inputName;
      const userSalary = this.inputSalary;
      const user = new this.User(userName, userSalary);
      console.log(user);
      this.createOrLoadJSON(user);
    },
    // CREATES A JSON WITH DATA FROM THE USERS
    createOrLoadJSON(data) {
      const fs = require('fs');
      const json = JSON.stringify(data, null, 4);
      if (fs.existsSync(this.userDataAbsPath)) {
        console.log('file exists!');
        fs.appendFileSync(this.userDataAbsPath, json);
      } else {
        console.log('file not exists!');
        fs.writeFile(this.userDataAbsPath, json, (error) => {
          if (error !== null) {
            console.log(error);
          }
        });
      }
      this.postUsers();
    },
    // PRINTS DATA FROM userData.json TO DOM
    postUsers() {
    },
  },
};
How can I fix this?
The problem is that the appendFile method does not merge anything; it just adds text after the existing content.
You must merge your JSON with Object.assign first.
createOrLoadJSON(data) {
  const fs = require('fs');
  if (fs.existsSync(this.userDataAbsPath)) {
    console.log('file exists!');
    const existingJSON = fs.readFileSync(this.userDataAbsPath, "utf8"); // read file and return decoded value
    const newJSON = Object.assign(JSON.parse(existingJSON), data); // merge file value and new data
    fs.writeFileSync(this.userDataAbsPath, JSON.stringify(newJSON, null, 4)); // rewrite file
  } else {
    console.log('file not exists!');
    fs.writeFile(this.userDataAbsPath, JSON.stringify(data, null, 4), (error) => { // if file does not exist, stringify data here
      if (error !== null) {
        console.log(error);
      }
    });
  }
  this.postUsers();
},
Working example:
// proper merge with Object.assign
var assign = {
  foo: 'bar'
};
var assign2 = {
  bar: 'baz'
};
var assign3 = Object.assign(assign, assign2);
console.log('Object assign: ', assign3);

// appendFile behaves more like this
var append = {
  foo: 'bar'
};
var append2 = {
  bar: 'baz'
};
var append3 = JSON.stringify(append) + JSON.stringify(append2);
console.log('fs.appendFile: ', append3);
