How do I extract data from df ~ onto my website? - javascript

I am creating a pie chart which shows how much disk space is available/used on my Linux box. However, I am unsure how to expose the parsed `df` data through a microservice URL. Any help would be greatly appreciated.
Here is what I have at the moment:
Router:
// Register the GET handler for /linux_disk on the Express router.
router.route('/linux_disk').get(disk.get_linux_disk)
Controller:
// Controller for GET /linux_disk.
// NOTE(review): this is the asker's original, broken version — the answer
// below rewrites it. The defects are annotated inline.
function get_linux_disk(req, res, next) {
try {
// Shell command reporting disk usage for the home directory.
var cmd = `df ~`;
// BUG(review): exec() is asynchronous and its result is discarded here —
// the command's stdout is never captured, so the response below can only
// ever contain the command string itself.
exec(cmd)
// NOTE(review): `rows` is assigned without var/let/const — an implicit global.
rows = [];
rows.push({"Command": cmd});
// `rows` always holds exactly one element here, so the 404 branch is unreachable.
if (rows.length >= 1) {
res.status(200).json(rows);
} else {
res.status(404).end();
}
} catch (err) {
// Delegate unexpected synchronous errors to the Express error handler.
next(err);
}
}

You might try the approach below, we create a row object for each entry that the df
command creates. Once you have this you should be able to create your pie chart from this:
const { exec } = require('child_process');
const { promisify } = require('util');
const execPromise = promisify(exec);
/**
 * GET /linux_disk controller: runs `df ~` and returns one JSON object per
 * filesystem row (keys taken from df's header line) so the caller can build
 * a pie chart from the result.
 */
async function get_linux_disk(req, res, next) {
  try {
    const result = await execPromise(`df ~`);
    const lines = result.stdout.split("\n");
    // The first line of df output is the header; its column names become keys.
    const keys = lines[0].split(/\s+/ig);
    // Skip the header row and drop blank lines — df output ends with a
    // trailing newline, which would otherwise produce a garbage row of
    // empty/undefined values.
    const rows = lines
      .slice(1)
      .filter((line) => line.trim() !== "")
      .map((line) => {
        // Pair each whitespace-separated value with its header column.
        const values = line.split(/\s+/ig);
        return keys.reduce((row, key, index) => {
          row[key] = values[index];
          return row;
        }, {});
      });
    res.status(200).json(rows);
  } catch (err) {
    // Surface the failure (bad command, parse error) as a 500 with its message.
    res.status(500).send(err.message);
  }
}
The resulting JSON will look a bit like so :
[
{
"Filesystem": "/dev/sda1",
"1K-blocks": "10253588",
"Used": "7971516",
"Available": "1741504",
"Use%": "83%",
"Mounted": "/"
}
]

Related

How to read multiple json file using fs and bulk request

I'm using the Elasticsearch search engine with my React app. I was reading one file at the backend, as you can see in the code, and it works perfectly — but now I want to read three different JSON files into three different indexes using the "fs" package and a bulk request. Can you please help me?
the code:
// Read the exported JSON file (one JSON document per line) and bulk-index it.
fs.readFile("DocRes.json", { encoding: "utf-8" }, function (err, data) {
if (err) {
throw err;
}
// Build up a giant bulk request for elasticsearch.
// NOTE(review): `bulk_request` is assigned without var/let/const (implicit global).
bulk_request = data.split("\n").reduce(function (bulk_request, line) {
var obj, ncar;
try {
obj = JSON.parse(line);
} catch (e) {
// Any non-JSON line (e.g. the trailing blank line) lands here and is skipped.
console.log("Done reading 1");
return bulk_request;
}
// Rework the data slightly: keep only the fields we want indexed.
ncar = {
id: obj.id,
name: obj.name,
summary: obj.summary,
image: obj.image,
approvetool: obj.approvetool,
num: obj.num,
date: obj.date,
};
// Bulk API format: an action/metadata line followed by the document itself.
bulk_request.push({
index: { _index: "ncar_index", _type: "ncar", _id: ncar.id },
});
bulk_request.push(ncar);
return bulk_request;
}, []);
// A little voodoo to simulate synchronous insert: `busy` gates one
// in-flight bulk call at a time.
var busy = false;
var callback = function (err, resp) {
if (err) {
console.log(err);
}
// Mark the previous batch as finished so the next poll can send another.
busy = false;
};
// Recursively whittle away at bulk_request, 1000 at a time.
var perhaps_insert = function () {
if (!busy) {
busy = true;
client.bulk(
{
body: bulk_request.slice(0, 1000),
},
callback
);
bulk_request = bulk_request.slice(1000);
console.log(bulk_request.length);
}
if (bulk_request.length > 0) {
// Poll again shortly; the next batch is sent once `busy` clears.
setTimeout(perhaps_insert, 100);
} else {
console.log("Inserted all records.");
}
};
perhaps_insert();
});
You can create multiple promises for each file read and feed it to the elastic search bulk_request.
const fsPromises = require('fs').promises;
const nodePath = require('path');

// Read one file and resolve with its contents.
// fsPromises.readFile already returns a promise, so there is no need to
// wrap it in `new Promise` (and `await` is only legal inside an async
// function — the original wrapper was a syntax error).
const fetchFile = (filename) =>
  fsPromises.readFile(nodePath.join(__dirname, filename), 'utf8'); // make sure the path is correct

const files = ['filename1', 'filename2'];

// Kick off all reads in parallel and wait for every one of them.
Promise.all(files.map(fetchFile))
  .then((data) => console.log(data))
  .catch((e) => console.log(e));
Once you get data from all the promises pass it to the elastic search.

Updating Yaml File through Patch method not working

I have a YAML file (layouts.yaml) of this format, on which I want to perform CRUD operations through a REST API:
Layouts:
-
Name: Default Layout
LayoutId : 1
ConfiguredSegments:
LiveA :
Height : 100
Id : LiveA
Ref1A :
Height : 100
Id : Ref1A
My controller function to update a layout based on layout ID (I tried 2 ways, neither of which works):
1st way: //This does not seem to work
// Load and parse the YAML layouts file once at module start-up.
const raw = fs.readFileSync("layouts.yaml", 'utf8');
const layoutData = YAML.load(raw);
//function to update specific layout based on LayoutId
// NOTE(review): layoutData has the shape { Layouts: [...] } (see the YAML
// above), so calling .forEach directly on the clone fails — it is an object,
// not an array. This is why this variant "does not seem to work".
export const updateSpecificLayout = (req, res)=>{
const { id } = req.params;
const { ConfiguredSegments } = req.body;
// JSON round-trip used as a cheap deep clone of the parsed YAML.
const getLayoutList = JSON.parse(JSON.stringify(layoutData));
getLayoutList.forEach(element => {if(element.LayoutId == id) element.ConfiguredSegments =
ConfiguredSegments
});
let yaml = YAML.dump(getLayoutList);
// NOTE(review): fs.writeFileSync is synchronous and takes no callback —
// the function passed here is treated as (invalid) options and never runs;
// use fs.writeFile for a callback API, or call writeFileSync without one.
fs.writeFileSync("layouts.yaml", yaml, function (err,file){
if(err) throw err;
console.log(`Layout with the id:${id} has been updated`);
})
}
2nd way://This does not seem to work as well
// Load and parse the YAML layouts file once at module start-up.
const raw = fs.readFileSync("layouts.yaml", 'utf8');
const layoutData = YAML.load(raw);
//function to update specific layout based on LayoutId
// NOTE(review): this variant only mutates the deep clone in memory and never
// writes anything back to layouts.yaml — that is why the file on disk never
// changes, even though the log line prints.
export const updateSpecificLayout = (req, res)=>{
const { id } = req.params;
const { ConfiguredSegments } = req.body;
// JSON round-trip used as a cheap deep clone of the parsed YAML.
const getLayout = JSON.parse(JSON.stringify(layoutData));
// Loose == on purpose: route params are strings, LayoutId is a number.
const foundLayout = getLayout.Layouts.find((layout) => layout.LayoutId == id);
if(ConfiguredSegments)foundLayout.ConfiguredSegments = ConfiguredSegments;
console.log(`Layout with the id:${id} has been updated`);
}
Through Postman i am testing my api with patch request with the following body:
{
"ConfiguredSegments": {
"Ref2A": {
"Height": 100,
"Id": "LiveA"
},
"Ref3A": {
"Height": 100,
"Id": "Ref1A"
}
}
}
But the YAML file is not getting updated. Are there any other ways to achieve this?
You can try using this method.
Define a function which will be able to find and replace the object you are looking for.
Your controller function:
/**
 * PATCH handler: replaces the ConfiguredSegments of the layout whose
 * LayoutId matches the :id route parameter, then persists the whole
 * structure back to layouts.yaml.
 */
export const updateSpecificLayout = (req, res) => {
  const { id } = req.params;
  const { ConfiguredSegments } = req.body;
  // Deep-clone the parsed YAML so the in-memory copy is only replaced
  // together with the file on disk.
  const getLayoutList = JSON.parse(JSON.stringify(layoutData));
  // Loose == on purpose: route params are strings, LayoutId is a number.
  const layoutToBeUpdated = getLayoutList.Layouts.find((layout) => layout.LayoutId == id);
  findAndReplace(getLayoutList.Layouts, layoutToBeUpdated.ConfiguredSegments, ConfiguredSegments);
  const yaml = YAML.dump(getLayoutList);
  // writeFileSync is synchronous and takes NO callback — it throws on
  // failure. (Passing a callback, as the original did, is invalid options
  // and the success log never ran.)
  fs.writeFileSync("layouts.yaml", yaml);
  console.log(`Layout with the id:${id} has been updated`);
};
The helper function which can find and replace the data.
// Recursively searches `object` for a property whose value is `value`
// (compared loosely, i.e. by reference for objects) and, on the first match,
// overwrites that owner's "ConfiguredSegments" property with `replacevalue`.
function findAndReplace(object, value, replacevalue) {
  for (const key of Object.keys(object)) {
    const entry = object[key];
    // Descend into nested objects/arrays first.
    if (entry !== null && typeof entry === 'object') {
      findAndReplace(entry, value, replacevalue);
    }
    if (object[key] == value) {
      object["ConfiguredSegments"] = replacevalue;
      break;
    }
  }
}

Multiple Objects

I have a weird looking object that I would like to turn into an object with multiple objects. (what I mean by multiple objects in nested objects) The current object looks like this:
{
'test.txt': "This is a test\r\n\r\nI hope it'll work",
'testy.js': 'console.log("thonk");\r\n',
'thonk\\i swear\\egg.txt': 'am going to be happy?',
'thonk\\pls work.txt': 'dasdas'
}
And I want it to look like this:
{
"test.txt": "This is a test\r\n\r\nI hope it'll work",
"testy.js": "console.log('thonk');\r\n",
"thonk": {
"I swear": {
"egg.txt": "am going to be happy?"
},
"pls work.txt": "dasdas"
}
}
Edit:
here's what my code is (if u need it):
var fs = require("fs");
var path = require("path");
// Recursively walk `dir`, collecting the absolute path of every file found.
// `done(err, results)` is invoked once the whole subtree has been visited.
var walk = function (dir, done) {
var results = [];
fs.readdir(dir, function (err, list) {
if (err) return done(err);
var i = 0;
// Process the directory entries one at a time (serial async iteration).
(function next() {
var file = list[i++];
// No entries left: report the accumulated results to the caller.
if (!file) return done(null, results);
file = path.resolve(dir, file);
fs.stat(file, function (err, stat) {
if (stat && stat.isDirectory()) {
// Descend into the subdirectory, then merge its files and continue.
walk(file, function (err, res) {
results = results.concat(res);
next();
});
} else {
results.push(file);
next();
}
});
})();
});
};
var root = "test";
var data = {};
walk("test", function (err, results) {
if (err) throw err;
// Map each path — relative to the root folder, backslash-separated on
// Windows — to that file's contents. This produces the flat object shown
// in the question. NOTE(review): `i` below is an implicit global.
for (i in results) {
data[
results[i].replace(__dirname + "\\" + root + "\\", "")
] = fs.readFileSync(results[i], "utf8");
}
console.log(data);
});
This can be done by combining Object.keys() and Array.reduce() as follows:
// Sample flat object: backslash-separated keys encode nested folders.
const source = {
  'test.txt': "This is a test\r\n\r\nI hope it'll work",
  'testy.js': 'console.log("thonk");\r\n',
  'thonk\\i swear\\egg.txt': 'am going to be happy?',
  'thonk\\pls work.txt': 'dasdas'
};

// Fold every flat key into a nested object tree.
const result = Object.keys(source).reduce((target, k) => {
  const keys = k.split('\\');
  if (keys.length == 1) {
    // No backslash: a plain file at the top level.
    target[k] = source[k];
  } else {
    // Reuse an existing subtree for the first path segment, if any.
    const nestedObj = target[keys[0]] || {};
    keys.slice(1).reduce((o, nestedKey, i) => {
      if (i < keys.length - 2) {
        // Intermediate folder: reuse it when it already exists, so that
        // sibling entries added earlier are not clobbered by a fresh {}.
        o[nestedKey] = o[nestedKey] || {};
      } else {
        // Last segment: the actual file content.
        o[nestedKey] = source[k];
      }
      return o[nestedKey];
    }, nestedObj);
    target[keys[0]] = nestedObj;
  }
  return target;
}, {});
console.log(result);
So you should create a new object, then go through each element, combining the items that you need and placing them into the new object.

How to update a global variable with a sqlite query in javascript?

I want to update a globally declared variable after sqlite query,
but I can't get it to work,
I have read that it might be related to asynchronous functions but I have no idea how to implement callbacks and stuff in this example,
can you guys help please. Here is the code:
const sqlite3 = require('sqlite3').verbose();
const dbPath = './src/db/db.sqlite3';
// Open (or create) the database file; the callback only reports open errors.
let db = new sqlite3.Database(dbPath, (err) => {
if (err) {
console.error(err.message);
}
console.log('Connected to database.');
});
let number = null;
let rowsExist = null;
// NOTE(review): db.get is asynchronous — this callback runs AFTER the
// `if (rowsExist === null)` check below, which is why the global never
// appears to update. The dependent logic must run inside this callback
// (or the query must be wrapped in a promise and awaited).
db.get("select count(*) from PRICE", [], (err, row) => {
if (err) {
console.error(err.message)
}
else {
// The count is the first (only) column of the returned row.
rowsExist = Object.values(row)[0];
// BUG(review): `rowExist` is a typo for `rowsExist` — this line throws
// a ReferenceError when reached.
console.log(rowExist) //this works but outside the function it doesnt get updated
}
});
// here rowExist remains the same after query
// (this branch executes immediately, before the query above completes)
if (rowsExist === null) {
number = 1
}
else {
db.get("SELECT number FROM PRICE ORDER BY number DESC LIMIT 1", [], (err, row) => {
if (err) {
console.error(err.message)
}
else {
// Next id = highest existing `number` + 1.
number = Object.values(row)[0] + 1
}
})
};

How to Read a Objects in a .txt Filepath in Javascript When Given the Filepath

I have a filepath that leads to a .txt file that has a number of objects in it. I'm trying to write a JavaScript function that will take in this filepath as an argument and allow me to access and iterate over these objects, but everything I've tried and found online doesn't work. Is there a technique to accomplish this task?
I'm just trying this out in VS Code. The contents of the .txt file are:
{"food": "chocolate", "eaten", true}
{"food": "hamburger", "eaten", false}
{"food": "peanuts", "eaten", true}
{"food": "potato", "eaten", true}
I tried just iterating over the file path as an argument but that didn't work and it just returned the file path itself, and I have had no luck with any of the read file solutions on this site.
I know in Ruby this is easily accomplishable through:
# Print the file line by line; File.foreach streams the lines and
# closes the handle automatically, just like the block form of File.open.
File.foreach("my/file/path") { |line| puts line }
But I am confused about the JavaScript solution.
const fs = require('fs');

// Read the "almost JSON" file, repair it into valid JSON, and parse it.
fs.readFile('txtFilePath', 'utf8', (err, data) => {
  // Guard the read: without this, a missing file leaves `data` undefined
  // and the .replace call below crashes with a confusing TypeError.
  if (err) throw err;
  // Fix the malformed `"eaten",` pairs and join the per-line objects
  // with commas so the whole file becomes one JSON array.
  const toValidJSON = data.replace(/"eaten",/g, '"eaten":').replace(/\}[\r\n]+\{/g, '},{');
  const validJSON = `[${toValidJSON}]`;
  const arr = JSON.parse(validJSON);
  console.log(arr);
});
for this question only
In Node.js, if you want a streaming approach, extend a Transform stream to parse JSON between line separators:
const { Transform } = require('stream')
module.exports = class DelimitedJSONTransform extends Transform {
constructor ({ delimiter = '\n', encoding = 'utf8', reviver = null } = {}) {
super({ readableObjectMode: true })
this._delimiter = delimiter
this._encoding = encoding
this._reviver = reviver
this._buffer = ''
}
_transform (chunk, encoding, callback) {
switch (encoding) {
case 'buffer':
this._buffer += chunk.toString(this._encoding)
break
default:
this._buffer += chunk
break
}
const lines = this._buffer.split(this._delimiter)
const latest = lines.pop()
try {
while (lines.length > 0) {
this.push(JSON.parse(lines.shift(), this._reviver))
}
callback()
} catch (error) {
callback(error)
} finally {
lines.push(latest)
this._buffer = lines.join(this._delimiter)
}
}
_flush (callback) {
if (!this._buffer.trim()) {
return
}
const lines = this._buffer.split(this._delimiter)
try {
while (lines.length > 0) {
this.push(JSON.parse(lines.shift(), this._reviver))
}
callback()
} catch (error) {
callback(error)
}
}
}
Usage
const { createReadStream } = require('fs')
const DelimitedJSONTransform = require('./transform') // or whatever you named the file above

// Pipe the raw file through the transform and log each parsed document.
createReadStream('jsons.txt')
  .pipe(new DelimitedJSONTransform())
  .on('data', (object) => { console.log(object) })
  .on('error', (error) => { console.error(error) })

Categories

Resources