Get all JSON files from a Node folder and find a specific attribute inside - JavaScript

I have a folder in my Node app with several JSON files (there can be more than 10), and for validation purposes I need to read them and find a specific property. If that property occurs in more than one JSON file, I need to throw an error. What is the best way to do this from a performance and efficiency standpoint?
For example, my folder is called plugins, and all the JSON files are structured like the following:
json1
{
  "action": [
    {
      "delete": {
        "path": "deleteFile",
        "providedAction": "Del"
      }
    },
    {
      "update": {
        "path": "updateFile",
        "providedAction": "UPD"
      }
    }
  ]
}
The following file is valid, since providedAction = "Add" does not exist in any other JSON file.
json2
{
  "action": [
    {
      "add": {
        "path": "addFile",
        "providedAction": "Add"
      }
    }
  ]
}
The following file is not valid, since its providedAction = "UPD" already exists in json1.
JSON 3
{
  "action": [
    {
      "update": {
        "path": "updateFile",
        "providedAction": "UPD"
      }
    }
  ]
}
I need to verify that only one JSON file has a given action such as "Del"; if more than one JSON file has it, I should throw an error. What is the recommended way to do this?

OK, here is the code. If you don't understand something, let me know and I will be glad to help you!
var glob = require("glob");
var fs = require("fs");

// Returns true if `needle` is already present in the `haystack` array.
var _inArray = function(needle, haystack) {
  for (var k in haystack) {
    if (haystack[k] === needle) {
      return true;
    }
  }
  return false;
};

glob("json/*.json", function(err, files) { // read the folder, or folders if you want, e.g. json/**/*.json
  if (err) {
    console.log("cannot read the folder, something goes wrong with glob", err);
    return;
  }
  var matters = [];
  files.forEach(function(file) {
    fs.readFile(file, 'utf8', function(err, data) { // read each file
      if (err) {
        console.log("cannot read the file, something goes wrong with the file", err);
        return;
      }
      var obj = JSON.parse(data);
      obj.action.forEach(function(crud) {
        for (var k in crud) {
          if (_inArray(crud[k].providedAction, matters)) {
            // do your magic HERE
            console.log("duplicate found!");
            // ideally you would stop the whole run here; there is no point in reading more files.
            return;
          }
          matters.push(crud[k].providedAction);
        }
      });
    });
  });
});
JSON 1:
{
  "action": [
    {
      "delete": {
        "path": "deleteFile",
        "providedAction": "Del"
      }
    },
    {
      "update": {
        "path": "updateFile",
        "providedAction": "UPD"
      }
    }
  ]
}
JSON 2:
{
  "action": [
    {
      "add": {
        "path": "addFile",
        "providedAction": "Add"
      }
    }
  ]
}
JSON 3:
{
  "action": [
    {
      "update": {
        "path": "updateFile",
        "providedAction": "UPD"
      }
    }
  ]
}
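A note on stopping early: with asynchronous callbacks every read is already scheduled, so the return in the code above only skips the current file. Here is a minimal sketch (my own variation, not part of the answer above) using synchronous reads, which makes it easy to abort and throw on the first duplicate, as the question asks:
const fs = require("fs");
const path = require("path");

// Throws on the first providedAction that appears more than once
// across all JSON files in `dir`.
function validatePlugins(dir) {
  const seen = new Set();
  for (const name of fs.readdirSync(dir).filter((f) => f.endsWith(".json"))) {
    const obj = JSON.parse(fs.readFileSync(path.join(dir, name), "utf8"));
    for (const entry of obj.action) {
      for (const key of Object.keys(entry)) {
        const action = entry[key].providedAction;
        if (seen.has(action)) {
          throw new Error("duplicate providedAction '" + action + "' found in " + name);
        }
        seen.add(action);
      }
    }
  }
}

validatePlugins("plugins"); // the folder name from the question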

Not the prettiest code I've written, but here it is:
// Require the Node.js file system library
var fs = require('fs');

var path = '/usr/local/var/jsons';
var delCounter = 0;

// Readdir reads a path and gives an array of filenames
// to the callback handleFiles.
fs.readdir(path, handleFiles);

function handleFiles (err, files) {
  if (err) throw err;
  var i;
  var jsonFilePattern = /\.json$/i;
  var fileName;
  var filePath;
  // Tells fs to read a utf-8 file.
  var fileReadOptions = {
    'encoding': 'utf-8'
  };
  for (i = 0; i < files.length; ++i) {
    fileName = files[i];
    // Check if the file has a .json extension
    if (fileName.match(jsonFilePattern)) {
      filePath = path + '/' + fileName;
      // Open the file as utf-8 and call handleJsonFile back
      // when done reading.
      fs.readFile(filePath, fileReadOptions, handleJsonFile);
    }
  }
}

function handleJsonFile (err, data) {
  if (err) throw err;
  var dataObject = JSON.parse(data);
  var i;
  var action;
  // Loop through all possible actions.
  for (i = 0; i < dataObject.action.length; ++i) {
    action = dataObject.action[i];
    if (action.delete &&
        action.delete.providedAction &&
        action.delete.providedAction === 'Del')
    {
      // If there is a 'Del', add it to the counter.
      ++delCounter;
    }
  }
  if (delCounter > 1) {
    throw new Error('JSONs not valid.');
  }
}

Something like this, perhaps? Enjoy!!!
npm install glob
JSON 1
module.exports = {
  "action": [{
    "delete": {
      "path": "deleteFile",
      "action": "Del"
    }
  }]
}
CODE
(function() {
  var glob = require("glob");
  glob("path/to/*.js", function(er, files) {
    if (er) return;
    var x = 0;
    files.forEach(function(file) {
      require(file)['action'].forEach(function(act) {
        // Guard against entries that have no `delete` key at all.
        if (act.delete && act.delete.action === "Del") x++;
      });
    });
    if (x > 1) throw new Error("more than one file provides 'Del'"); // or something, ja!
  });
})();
It's 5am and I haven't slept, sorry if I make mistakes; I only want to show you the way... not for copy-paste!! xD

Using modern syntax, reduce and spread could be of great help here:
const fs = require('fs');
const path = require('path');

const dir = './jsons'; // directory containing the JSON files
const files = fs.readdirSync(dir);

files.reduce((acc, curr) => {
  const file = JSON.parse(fs.readFileSync(path.join(dir, curr), 'utf8'));
  const merged = { ...acc, ...file };
  // Check for destructive merging: if the merged object has fewer keys than
  // the two sources combined, some key existed in more than one file.
  if (Object.keys(file).length + Object.keys(acc).length > Object.keys(merged).length) {
    throw Error('Destructive merge of JSON files.');
  }
  return merged;
}, {});

const fs = require('fs');
const path = require('path');

// Directory that contains all your JSON files
const dirPath = './something/something';

const files = fs.readdirSync(dirPath);
const arr = [];
files.forEach((val, i) => {
  const file = JSON.parse(fs.readFileSync(path.join(dirPath, val), 'utf8'));
  arr.push(file);
});
if (arr.length === files.length) {
  console.log(arr);
}
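This collects every file into arr but stops short of the validation the question actually asks for. A possible follow-up (a sketch of mine, assuming the action/providedAction layout from the question) would be:
// Count occurrences of each providedAction; seeing one twice is an error.
const counts = {};
arr.forEach((obj) => {
  (obj.action || []).forEach((entry) => {
    Object.keys(entry).forEach((key) => {
      const action = entry[key].providedAction;
      counts[action] = (counts[action] || 0) + 1;
      if (counts[action] > 1) {
        throw new Error("providedAction '" + action + "' appears more than once");
      }
    });
  });
});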


Multiple Objects

I have a weird-looking object that I would like to turn into an object with multiple objects (by "multiple objects" I mean nested objects). The current object looks like this:
{
  'test.txt': "This is a test\r\n\r\nI hope it'll work",
  'testy.js': 'console.log("thonk");\r\n',
  'thonk\\i swear\\egg.txt': 'am going to be happy?',
  'thonk\\pls work.txt': 'dasdas'
}
And I want it to look like this:
{
  "test.txt": "This is a test\r\n\r\nI hope it'll work",
  "testy.js": "console.log('thonk');\r\n",
  "thonk": {
    "i swear": {
      "egg.txt": "am going to be happy?"
    },
    "pls work.txt": "dasdas"
  }
}
Edit:
Here's what my code is (if you need it):
var fs = require("fs");
var path = require("path");

var walk = function (dir, done) {
  var results = [];
  fs.readdir(dir, function (err, list) {
    if (err) return done(err);
    var i = 0;
    (function next() {
      var file = list[i++];
      if (!file) return done(null, results);
      file = path.resolve(dir, file);
      fs.stat(file, function (err, stat) {
        if (stat && stat.isDirectory()) {
          walk(file, function (err, res) {
            results = results.concat(res);
            next();
          });
        } else {
          results.push(file);
          next();
        }
      });
    })();
  });
};

var root = "test";
var data = {};

walk("test", function (err, results) {
  if (err) throw err;
  for (var i in results) {
    data[
      results[i].replace(__dirname + "\\" + root + "\\", "")
    ] = fs.readFileSync(results[i], "utf8");
  }
  console.log(data);
});
This can be done by combining Object.keys() and Array.reduce() as follows:
const source = {
  'test.txt': "This is a test\r\n\r\nI hope it'll work",
  'testy.js': 'console.log("thonk");\r\n',
  'thonk\\i swear\\egg.txt': 'am going to be happy?',
  'thonk\\pls work.txt': 'dasdas'
};

const result = Object.keys(source).reduce((target, k) => {
  const keys = k.split('\\');
  if (keys.length == 1) {
    target[k] = source[k];
  } else {
    const nestedObj = target[keys[0]] || {};
    keys.slice(1).reduce((o, nestedKey, i) => {
      // Reuse an existing intermediate object so sibling paths
      // (e.g. two files under the same folder) don't clobber each other.
      const value = i < keys.length - 2 ? (o[nestedKey] || {}) : source[k];
      o[nestedKey] = value;
      return value;
    }, nestedObj);
    target[keys[0]] = nestedObj;
  }
  return target;
}, {});

console.log(result);
So you create a new object, go through each element, combine the items you need, and place them into the new object.

How do I extract data from df ~ onto my website?

I am creating a pie chart which shows how much disk space is available/used on my Linux box. However, I am unsure how to serve the parsed data at a microservice URL. Help will be greatly appreciated.
Here is what I have at the moment:
Router:
router.route('/linux_disk').get(disk.get_linux_disk)
Controller:
function get_linux_disk(req, res, next) {
  try {
    var cmd = `df ~`;
    exec(cmd)
    rows = [];
    rows.push({"Command": cmd});
    if (rows.length >= 1) {
      res.status(200).json(rows);
    } else {
      res.status(404).end();
    }
  } catch (err) {
    next(err);
  }
}
You might try the approach below, where we create a row object for each entry that the df command produces. Once you have this, you should be able to build your pie chart from it:
const { exec } = require('child_process');
const { promisify } = require('util');

const execPromise = promisify(exec);

async function get_linux_disk(req, res, next) {
  try {
    const result = await execPromise(`df ~`);
    const lines = result.stdout.split("\n");
    const keys = lines[0].split(/\s+/ig);
    // Skip the header row when assigning objects..
    const rows = lines.slice(1).map(line => {
      // Parse each line..
      const values = line.split(/\s+/ig);
      return keys.reduce((o, k, index) => {
        o[k] = values[index];
        return o;
      }, {});
    });
    res.status(200).json(rows);
  } catch (err) {
    res.status(500).send(err.message);
  }
}
The resulting JSON will look a bit like this:
[
  {
    "Filesystem": "/dev/sda1",
    "1K-blocks": "10253588",
    "Used": "7971516",
    "Available": "1741504",
    "Use%": "83%",
    "Mounted": "/"
  }
]

Can't get a correct JSON format, instead I am messing it up

What do I want?
Good question, isn't it? Well...
I am working on an application to calculate budgets with electron-vue.
In my app I try to save the users in a JSON file so that they persist across application restarts.
The JSON file should look like this:
{
  "deniz": {
    "salary": 1234
  },
  "hüseyin": {
    "salary": 4321
  }
}
What do I get?
I am getting this instead:
{
  "deniz": {
    "salary": 1234
  }
}{
  "hüseyin": {
    "salary": 4321
  }
}
The problem is, this is invalid JSON: I am creating a whole new object after the existing object.
How am I doing this?
I created a userDataControllerMixin.js to separate the logic from the component itself.
I have two input fields in my component, 1. userName and 2. userSalary, to collect the user data.
Inside my userDataControllerMixin.js:
export const userDataControllerMixin = {
  data() {
    return {
      userDataAbsPath: 'src/data/userData.json',
    };
  },
  mounted() {
    this.getUsers();
  },
  methods: {
    // FETCH THE userData.json
    getUsers() {
      const fs = require('fs');
      const loadJSON = fs.readFile('src/data/userData.json', 'utf8', (err, data) => {
        if (err) {
          console.log(`failed to read file: ${err}`);
        }
        // console.log(data);
      });
      return loadJSON;
    },
    // USING THIS CONSTRUCTOR TO BUILD A JSON FORMAT
    User(user, salary) {
      this[user] = {
        salary: Number(salary),
      };
      return user;
    },
    // GET INPUT FROM USERS INPUTBOX
    getInput(inputName, inputSalary) {
      const userName = this.inputName;
      const userSalary = this.inputSalary;
      const user = new this.User(userName, userSalary);
      console.log(user);
      this.createOrLoadJSON(user);
    },
    // CREATES A JSON WITH DATA FROM THE USERS
    createOrLoadJSON(data) {
      const fs = require('fs');
      const json = JSON.stringify(data, null, 4);
      if (fs.existsSync(this.userDataAbsPath)) {
        console.log('file exists!');
        fs.appendFileSync(this.userDataAbsPath, json);
      } else {
        console.log('file not exists!');
        fs.writeFile(this.userDataAbsPath, json, (error) => {
          if (error !== null) {
            console.log(error);
          }
        });
      }
      this.postUsers();
    },
    // PRINTS DATA FROM userData.json TO DOM
    postUsers() {
    },
  },
};
How can I fix this?
The problem is that appendFile does not merge anything; it just writes text after the existing text.
You must merge your JSON with Object.assign first.
createOrLoadJSON(data) {
  const fs = require('fs');
  if (fs.existsSync(this.userDataAbsPath)) {
    console.log('file exists!');
    const existingJSON = fs.readFileSync(this.userDataAbsPath, "utf8"); // read file as a utf-8 string
    const newJSON = Object.assign(JSON.parse(existingJSON), data); // merge file value and new data
    fs.writeFile(this.userDataAbsPath, JSON.stringify(newJSON, null, 4), (error) => { // rewrite file
      if (error !== null) {
        console.log(error);
      }
    });
  } else {
    console.log('file not exists!');
    fs.writeFile(this.userDataAbsPath, JSON.stringify(data, null, 4), (error) => { // if the file does not exist, stringify data here
      if (error !== null) {
        console.log(error);
      }
    });
  }
  this.postUsers();
},
Working example:
// proper merge with Object.assign
var assign = {
  foo: 'bar'
};
var assign2 = {
  bar: 'baz'
};
var assign3 = Object.assign(assign, assign2);
console.log('Object assign: ', assign3);

// appendFile looks more like this
var append = {
  foo: 'bar'
};
var append2 = {
  bar: 'baz'
};
var append3 = JSON.stringify(append) + JSON.stringify(append2);
console.log('fs.appendFile: ', append3);

Unable to successfully upload images to S3 and then view them

I'm trying to upload images to an S3 bucket as part of the application.
index.js
function upImg(req) {
  if (req.files.img) {
    var img = req.files.image;
    var name = Math.round(Math.random()*10000).toString(); // Returns a random 5 digit number
    if (myDB.uploadImg(img, name)) {
      return name;
    } else {
      return "";
    }
  } else {
    return "";
  }
}
app.post('/newEV*', isLoggedIn, function(req, res) {
  var myURL = req.path.replace('/newEV', '');
  var imgPath = upImg(req);
  fetch(myURL).then(function (events) {
    var myID;
    var x = 0;
    while (!myID) {
      if (!events[x]) {
        myID = x;
      } else {
        x++;
      }
    }
    myDB.newEvent(myURL, req.body.name, req.body.desc, req.body.loc, imgPath, req.body.link, req.body.cap, req.body.date, req.body.time, myID, events);
    res.redirect('/edit' + myURL);
  });
});
myDB file
function signs3(file, name) {
  devs3();
  const s3 = new aws.S3();
  const s3Params = {
    Body: file,
    Bucket: S3_BUCKET,
    Key: name
  };
  s3.putObject(s3Params, function(err, data) {
    if (err) {
      throw err;
    } else {
      console.log("Data from putObject:" + JSON.stringify(data));
    }
  });
}

module.exports = {
  uploadImg: function(file, name) {
    var nName = "imgs/" + name;
    console.log(nName);
    signs3(file, nName);
    return true;
  }
};
I know that the signs3 function works because I can use it in other parts of my application to upload JSON files. Whenever I post to the URL, weirdly enough I can see the 'Data from putObject' output in the console; however, what I can't see is the nName. I don't understand this, as the console.log(nName) line should run before the other one. When I go to look at the bucket, the image hasn't been uploaded (despite me getting an ETag in the console), and the page does not display it (I know this part works too, because it can display images already uploaded to the bucket).
You want to do something like this, listening for events on the Request object that is created when you call putObject.
const req = s3.putObject(s3Params);

req.on('success', res => {
  console.log('upload complete!');
});

req.on('error', res => {
  console.error(res.error);
});

req.send();
Why does this appear to work differently for small files (JSON files) and large files (images)? Because uploadImg returns true immediately, before the asynchronous putObject call has finished; small files upload quickly enough that this rarely matters, while large files are still in flight when the rest of your code runs.
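If you would rather wait until S3 confirms the upload before returning, a minimal sketch (my own, assuming the same aws-sdk v2 setup, S3_BUCKET, and devs3() wiring as above) could use the .promise() form of the request:
// Sketch: resolve only once S3 has confirmed the upload.
async function uploadImgAsync(file, name) {
  devs3();
  const s3 = new aws.S3();
  try {
    const data = await s3.putObject({
      Body: file,
      Bucket: S3_BUCKET,
      Key: "imgs/" + name
    }).promise(); // aws-sdk v2 requests expose .promise()
    console.log("Data from putObject: " + JSON.stringify(data));
    return true;
  } catch (err) {
    console.error(err);
    return false;
  }
}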

How to Read Objects From a .txt File in JavaScript When Given the Filepath

I have a filepath that leads to a .txt file that has a number of objects in it. I'm trying to write a JavaScript function that will take in this filepath as an argument and allow me to access and iterate over these objects, but everything I've tried and found online doesn't work. Is there a technique to accomplish this task?
I'm just trying this in VS Code. The contents of the .txt file are:
{"food": "chocolate", "eaten", true}
{"food": "hamburger", "eaten", false}
{"food": "peanuts", "eaten", true}
{"food": "potato", "eaten", true}
I tried just iterating over the file path as an argument but that didn't work and it just returned the file path itself, and I have had no luck with any of the read file solutions on this site.
I know in Ruby this is easily accomplishable through:
File.open("my/file/path", "r") do |f|
  f.each_line do |line|
    puts line
  end
end
But I am confused about the JavaScript solution.
const fs = require('fs');

fs.readFile('txtFilePath', 'utf8', (err, data) => {
  if (err) throw err;
  // Fix the invalid `"eaten",` pairs, then join the separate objects
  // into one JSON array: {...}\n{...} becomes [{...},{...}]
  const toValidJSON = data.replace(/"eaten",/g, '"eaten":').replace(/\}[\r\n]+\{/g, '},{');
  const validJSON = `[${toValidJSON}]`;
  const arr = JSON.parse(validJSON);
  console.log(arr);
});
(This quick fix applies to the file contents in this question only.)
In Node.js, if you want a streaming approach, extend a Transform stream to parse JSON between line separators:
const { Transform } = require('stream')

module.exports = class DelimitedJSONTransform extends Transform {
  constructor ({ delimiter = '\n', encoding = 'utf8', reviver = null } = {}) {
    super({ readableObjectMode: true })
    this._delimiter = delimiter
    this._encoding = encoding
    this._reviver = reviver
    this._buffer = ''
  }

  _transform (chunk, encoding, callback) {
    switch (encoding) {
      case 'buffer':
        this._buffer += chunk.toString(this._encoding)
        break
      default:
        this._buffer += chunk
        break
    }
    const lines = this._buffer.split(this._delimiter)
    const latest = lines.pop() // keep the (possibly incomplete) last line buffered
    try {
      while (lines.length > 0) {
        this.push(JSON.parse(lines.shift(), this._reviver))
      }
      callback()
    } catch (error) {
      callback(error)
    } finally {
      lines.push(latest)
      this._buffer = lines.join(this._delimiter)
    }
  }

  _flush (callback) {
    if (!this._buffer.trim()) {
      return callback() // nothing left to parse, but the callback must still fire
    }
    const lines = this._buffer.split(this._delimiter)
    try {
      while (lines.length > 0) {
        this.push(JSON.parse(lines.shift(), this._reviver))
      }
      callback()
    } catch (error) {
      callback(error)
    }
  }
}
Usage
const { createReadStream } = require('fs')
const DelimitedJSONTransform = require('./transform') // or whatever you named the file above

let fileStream = createReadStream('jsons.txt')
let jsonTransform = fileStream.pipe(new DelimitedJSONTransform())

jsonTransform
  .on('data', object => { console.log(object) })
  .on('error', error => { console.error(error) })
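For comparison, a shorter sketch (my addition, not part of the answer above) that leans on Node's built-in readline module, assuming one complete JSON object per line:
const { createReadStream } = require('fs')
const readline = require('readline')

const rl = readline.createInterface({ input: createReadStream('jsons.txt') })
rl.on('line', line => {
  if (line.trim()) {
    console.log(JSON.parse(line)) // one object per line
  }
})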
