How to replace an old JSON object with a new, updated object using Node.js?
Right now when I update the JSON file it saves the new data alongside the old one.
JSON:
[
    {
        "id": 1,
        "name": "Sven",
        "phone": "123123"
    },
    {
        "id": 2,
        "name": "Martin",
        "phone": "2342342"
    }
]
Here is my code:
var operation = POST.operation; // POST request comes with operation = update/insert/delete
if (operation == 'update') {
    fs.readFile("file.json", "utf8", function (err, data) {
        var jsonFileArr = [];
        jsonFileArr = JSON.parse(data); // Parse the data from the JSON file
        var haveId = jsonFileArr.some(function (obj) { // Checks if the POST request has the same id as the JSON file
            return obj.id == POST.id;
        });
        if (haveId) { // if true
            var updateData = []; // Array with POST data
            updateData.push({
                id: POST.id,
                name: POST.name,
                phone: POST.phone,
            });
            jsonFileArr.push(updateData);
            var newUsers = JSON.stringify(jsonFileArr);
            fs.writeFile("file.json", newUsers, "utf8");
            console.log(err);
        }
    });
}
I should probably delete the old object, but how can I specify which object should be removed?
So when I update the data with id 1, it should delete the old id/name/phone and write the new data.
My assumption, based on your question, is that you have multiple objects in one file. So the easy way to work around this would be to:
if (operation == 'update') {
    fs.readFile("file.json", "utf8", function (err, data) {
        var jsonFileArr = JSON.parse(data); // Parse the data from the JSON file
        var haveId = jsonFileArr.some(function (obj) { // Checks if the POST request has the same id as the JSON file
            return obj.id == POST.id;
        });
        if (haveId) { // if true
            var updateData = { // Object built from the POST data
                id: POST.id,
                name: POST.name,
                phone: POST.phone,
            };
            for (let arr of jsonFileArr) {
                if (arr.id == POST.id) {
                    let currentIndex = jsonFileArr.indexOf(arr);
                    jsonFileArr.splice(currentIndex, 1, updateData); // removing the old object and adding the new one
                }
            }
            var newUsers = JSON.stringify(jsonFileArr);
            fs.writeFile("file.json", '', "utf8", function (err, res) { // Making the file empty
                if (!err) {
                    fs.writeFile("file.json", newUsers, "utf8", function (err, res) { // Writing the whole array back
                        if (err) console.log(err);
                        console.info(res);
                    });
                } else {
                    console.log(err);
                }
            });
        }
    });
}
I think this is better: instead of using some, get the matching index and replace directly.
var jsonFileArr = JSON.parse(data); // Parse the data from the JSON file
var foundIndex = jsonFileArr.findIndex(function (obj) { // Checks if the POST request has the same id as the JSON file
    return obj.id == POST.id;
});
if (foundIndex >= 0) {
    jsonFileArr[foundIndex] = {
        id: POST.id,
        name: POST.name,
        phone: POST.phone,
    };
}
.... and then write back to the file.
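For example, the write-back could look like this (a sketch, reusing fs and the file name from the question):

var newUsers = JSON.stringify(jsonFileArr);
fs.writeFile("file.json", newUsers, "utf8", function (err) {
    if (err) console.log(err);
});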
So I have an array which looks like this:
[
    { TransactionValues: '50.00' },
    { TransactionValues: '-77.43' },
    { TransactionValues: '-20.23' },
    { TransactionValues: '200.23' }
]
I am trying to find a way to target the monetary values and create a variable based on their sum. When I try to target the "50.00", for example, I get undefined and it's still an array.
I'm not exactly sure how I can target it specifically; is it possible? Any help would be appreciated.
As per the comments, here is the full code (be aware I'm still learning, so it's not elegant):
var fs = require('fs');
var parse = require('csv-parse');
var transactionValues = []; //Need an array to hold transactions
var currentTrans = [];
var savingsTrans = [];
// constructor for transactions
function addData(id, accountType, initiatorType, dateTime, transactions) {
    var data = {
        "AccountID": id,
        "AccountType": accountType,
        "InitiatorType": initiatorType,
        "DateTime": dateTime,
        "TransactionValues": transactions
    };
    transactionValues.push(data); // should add a new line
}

function logTrans(accountType, transactions) {
    if (accountType == "CURRENT") {
        var cTrans = {
            "TransactionValues": transactions
        };
        currentTrans.push(cTrans);
    } else {
        var sTrans = {
            "TransactionValues": transactions
        };
        savingsTrans.push(sTrans);
    }
}
//parses the csv file, loops each row and adds it to the transactionValue array
var parser = parse({columns: true}, function (err, results) {
    console.table(results);
    for (const row of results) {
        addData(row.AccountID, row.AccountType, row.InitiatorType, row.DateTime, row.TransactionValue);
        logTrans(row.AccountType, row.TransactionValue);
    }
    console.log(transactionValues);
    console.log(currentTrans);
    console.log(savingsTrans);
});
fs.createReadStream(__dirname+'/testData/customer-1234567-ledger.csv').pipe(parser)
I'm not completely following, but at the end of the day you have an array like the data below.
You can use filter to target the attribute you want.
You can use map to pull out just the values.
You can use reduce to sum them all up.
Run the snippet below to see each step:
const data = [
    { TransactionValues: '50.00', AccountType: 'CURRENT' },
    { TransactionValues: '-77.43', AccountType: null },
    { TransactionValues: '-20.23', AccountType: 'CURRENT' },
    { TransactionValues: '200.23', AccountType: null }
];
const CurrentTrans = data.filter((x) => x.AccountType === 'CURRENT');
const SavingTrans = data.filter((x) => x.AccountType !== 'CURRENT');
console.log('CurrentTrans');
console.log(CurrentTrans);
console.log('SavingTrans');
console.log(SavingTrans);
const CurrentTransValues = CurrentTrans.map((x) => parseFloat(x.TransactionValues));
const SavingTransValues = SavingTrans.map((x) => parseFloat(x.TransactionValues));
console.log('CurrentTransValues');
console.log(CurrentTransValues);
console.log('SavingTransValues');
console.log(SavingTransValues);
const TotalCurrentValues = CurrentTransValues.reduce((sum, x) => sum + x);
const TotalSavingValues = SavingTransValues.reduce((sum, x) => sum + x);
console.log('TotalCurrentValues');
console.log(TotalCurrentValues.toFixed(2));
console.log('TotalSavingValues');
console.log(TotalSavingValues.toFixed(2));
So I may have fixed it by using parseFloat in my addData and logTrans functions:
function addData(id, accountType, initiatorType, dateTime, transactions) {
    var data = {
        "AccountID": id,
        "AccountType": accountType,
        "InitiatorType": initiatorType,
        "DateTime": dateTime,
        "TransactionValues": parseFloat(transactions)
    };
    transactionValues.push(data); // should add a new line
}

function logTrans(accountType, transactions) {
    if (accountType == "CURRENT") {
        var cTrans = parseFloat(transactions);
        currentTrans.push(cTrans);
    } else {
        var sTrans = parseFloat(transactions);
        savingsTrans.push(sTrans);
    }
}
Now that seems to have worked, so I can use the "Sum values of objects in array" approach as suggested before. Thank you everyone :)
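For reference, the summing step with those numeric arrays could then be as small as this (a sketch; the 0 seed just guards against an empty array):

// currentTrans and savingsTrans now hold plain numbers, so reduce can add them directly.
var totalCurrent = currentTrans.reduce(function (sum, value) { return sum + value; }, 0);
var totalSavings = savingsTrans.reduce(function (sum, value) { return sum + value; }, 0);
console.log(totalCurrent.toFixed(2));
console.log(totalSavings.toFixed(2));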
How do I create the data array from my second API call result in the format I want?
I have code like this:
var github = require('octonode');
var client = github.client();
var userName = "octocat";
var repoName = "";
var branchName = "";
var data = [];
var branches = [];
client.get('/users/' + userName + '/repos', {}, function (err, status, body, headers) {
    body.forEach(function (obj) {
        repoName = obj.name;
        //==============================
        client.get('repos/' + userName + '/' + repoName + '/branches', {}, function (errx, statusx, bodyChild, headersx) {
            bodyChild.forEach(function (objChild) {
                branchName = objChild.name;
            });
        });
    });
});
I have received the repoName and branchName data as well.
I want my data formatted like this. How do I use
data.push({
    name: repoName,
    branches: 'branchName looping here for every repoName'
});
so that the repeated branch data is contained in my branches property?
Thank you
I guess you can do something like this:
var data = [];
client.get('/users/' + userName + '/repos', {}, function (err, status, body, headers) {
    body.forEach(function (obj) {
        const repoName = obj.name; // keep a per-iteration copy so the async callback below sees the right repo
        client.get('repos/' + userName + '/' + repoName + '/branches', {}, function (errx, statusx, bodyChild, headersx) {
            let elem = {"name": repoName, "branches": []}; // create a json object for each repo
            bodyChild.forEach(function (objChild) {
                elem.branches.push(objChild.name); // push all branches to that elem
            });
            data.push(elem); // add the elem to the data array
        });
    });
});
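One caveat: the inner client.get calls are asynchronous, so data is only fully populated after every branch request has come back. If you need to know when that point is reached, one option (a sketch built on the same client.get signature and the client, userName, and data variables used above) is to wrap each request in a Promise and wait on all of them:

// Wrap one repo's branch request in a Promise that resolves to { name, branches }.
function getRepoWithBranches(repoName) {
    return new Promise(function (resolve, reject) {
        client.get('repos/' + userName + '/' + repoName + '/branches', {}, function (err, status, body) {
            if (err) return reject(err);
            resolve({ name: repoName, branches: body.map(function (b) { return b.name; }) });
        });
    });
}

client.get('/users/' + userName + '/repos', {}, function (err, status, body) {
    if (err) return console.log(err);
    // Fire one branch request per repo and wait until every one has resolved.
    Promise.all(body.map(function (obj) { return getRepoWithBranches(obj.name); }))
        .then(function (result) {
            data = result; // data is now fully populated: [{ name, branches: [...] }, ...]
            console.log(data);
        })
        .catch(function (e) { console.log(e); });
});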
In this case data is an object that has a property name, which is a string, and another property branches, which is an array. If you want to push data to the branches property you can just call the push() function on it.
Please check the example below:
let data = {
    name: "repoName",
    branches: [
        {
            name: "foo"
        }
    ]
};

data.branches.push(
    {
        name: "bar"
    }
);
console.log(data);
I have a multidimensional array where each inner array is a combination of all the user info (entered by the user).
I want the user to be able to modify his information, such as his name or phone number, so that the next time he logs in, his new info is displayed.
The userList is saved as JSON; the idea is for the changes to be saved to and loaded from there.
Example:
if my array has
userList = JSON.parse(localStorage.getItem('userListLS'));
if (userList == null) {
    userList = [
        ['Isaac', 'Garth', 'IsaacG11#email.com', 'Mazda'],
        ['Matthew', 'Miller', 'mmiller21#mail.com', 'Volvo']
    ];
}
and let's say Isaac is logged in, and he wishes to change his name to Gabriel and his email to IsaacG21#email.com, so that the new array would be:
userList = [
    ['Gabriel', 'Garth', 'IsaacG21#email.com', 'Mazda'],
    ['Matthew', 'Miller', 'mmiller21#mail.com', 'Volvo']
];
how could this be accomplished?
Only JS and HTML please.
The easiest approach would be to modify the data directly using index selectors. Note that this mutates the initial array instead of returning a new reference, which can lead to unwanted side effects:
function modifyData(data, userId, fieldId, newData) {
    if (data[userId] && data[userId][fieldId]) {
        data[userId][fieldId] = newData;
    }
    return data;
}

// example
var userList = [
    ['Isaac', 'Garth', 'IsaacG11#email.com', 'Mazda'],
    ['Matthew', 'Miller', 'mmiller21#mail.com', 'Volvo']
];
const FIELD_NAME_ID = 0;
const USER_ID = 0;
var modified = modifyData(userList, USER_ID, FIELD_NAME_ID, 'Morty');
console.log('initial', userList);
console.log('modified', modified);
An immutable approach would be:
function modifyData(data, userId, fieldId, newData) {
    if (data && data[userId] && data[userId][fieldId]) {
        var _data = data.slice();
        var _user = data[userId].slice();
        _user[fieldId] = newData;
        _data[userId] = _user;
        return _data;
    }
    return data;
}

// example
var userList = [
    ['Isaac', 'Garth', 'IsaacG11#email.com', 'Mazda'],
    ['Matthew', 'Miller', 'mmiller21#mail.com', 'Volvo']
];
const FIELD_NAME_ID = 0;
const USER_ID = 0;
var modified = modifyData(userList, USER_ID, FIELD_NAME_ID, 'Morty');
console.log('initial', userList);
console.log('modified', modified);
But you should consider changing your structure to a dictionary of objects:
{
    user1: { name: 'Isaac', lastName: 'Garth' /* ... */ }
    // ...
}
or an array of objects (but then you have to find the right user in the array):
[
    { name: 'Isaac', lastName: 'Garth' /* ... */ }
    // ...
]
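With the array-of-objects shape, finding and updating the logged-in user could look something like this (a sketch; the email and car field names are my assumptions for the remaining columns):

var users = [
    { name: 'Isaac', lastName: 'Garth', email: 'IsaacG11#email.com', car: 'Mazda' },
    { name: 'Matthew', lastName: 'Miller', email: 'mmiller21#mail.com', car: 'Volvo' }
];

// Locate the user by a field you already know, then update the record in place.
var index = users.findIndex(function (u) { return u.email === 'IsaacG11#email.com'; });
if (index >= 0) {
    users[index].name = 'Gabriel';
    users[index].email = 'IsaacG21#email.com';
}
console.log(users);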
A good use case for a Map:
const users = new Map(
    userList.map(
        ([first, name, email, car]) => [first, { first, name, email, car }]
    )
);
So to change a certain user's email:
users.get("Isaac").email = "test # gmail.com";
A few more use cases:
// set a whole user:
users.set("Jack", {
    first: " Jack",
    name: "Daniels",
    email: "me#example.com",
    car: "VW"
});
// get an array of users again
userList = [...users.values()];
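And if you want to keep the original array-of-arrays shape in localStorage (the userListLS key from the question), you could map the Map's values back before saving; a small sketch:

// Turn each user object back into [first, name, email, car] and persist the list.
const plainList = [...users.values()].map(({ first, name, email, car }) => [first, name, email, car]);
localStorage.setItem('userListLS', JSON.stringify(plainList));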
I am using a JSON file to store some data. This is my current structure, created by my code attempts:
{
    "Username": "ozziep",
    "ProjectID": "ExpressJS",
    "TimeStamp": "2016-12-30T19:54:52.418Z",
    "Comments": "hello world how are we today?"
}
{
    "Username": "alex",
    "ProjectID": "Foo",
    "TimeStamp": "2016-12-30T19:55:07.138Z",
    "Comments": "we need to double check that this json system works. "
}
I generate the JSON like this; not the best code, I'm still learning JS.
var time = new Date();
var project_id = data.postid;
var comment = data.commentdata;
var usercommented = data.usercoment
fs.readFile("comments.json", 'utf-8', function (err, data) {
    if (err) {
        throw err;
    }
    if (typeof data !== "undefined") {
        var jsongrid = {
            "Username": usercommented,
            "ProjectID": project_id,
            "TimeStamp": time,
            "Comments": comment
        }
        //this all works, for now. and will hopefully stay that way.
        console.log(commentsdata)
        var JSONStringed = JSON.stringify(jsongrid, null, 4) //turning the json grid into JSON, and prettyprinting it. generates correct JSON
        var commentsdata = data; //current JSON on file.
        var CompiledJSON = "\n" + commentsdata + "\n " + JSONStringed; //adding the new to the old.
        var bCompiledJSON = "[" + CompiledJSON + "\n]"
        fs.truncate('comments.json', 0, function () { console.log('comments file can now be written to.') })
        // var jsonsearched = CompiledJSON.hasOwnProperty("Vortex.API")
        console.log(CompiledJSON[2])
        // var CompiledJsonPretty = JSON.stringify(CompiledJSON, null, 4); //pretty printing this creation.
        console.log("A user has submitted a comment to post " + project_id) //logging.
        console.log("Generating JSON")
        console.log(CompiledJSON)
        socket.emit("added_comment")
        // var json_temp = {"Comments":{"Username":usercommented,"CommentData":comment,"date":time,"ProjectID":project_id}}
        //var jsondata = JSON.stringify(json_temp)
        console.log("--------------------------------------------")
        console.log("Temp JSON generated - value: \n\n" + JSONStringed)
        if (typeof JSONStringed !== "undefined") {
            fs.writeFile("comments.json", bCompiledJSON, function (err) {
                if (!err) {
                    //verify data has been written, cause comments are important!
                    fs.readFile("comments.json", 'utf-8', function (err, data) {
                        if (err) {
                            throw err;
                        }
                        if (data !== CompiledJSON) {
                            console.log("Writing comment JSON to file failed.")
                            console.log("- \n if (data) !== JSONStringed; failed. ")
                        } else {
                            socket.emit("added_comment")
                        }
                    })
                } else {
                    throw err;
                }
            })
        }
    }
})
// console.log(JSON.stringify(json))
I do plan on minimising it a little; it's too much code for something so simple. Anyway, it creates the JSON from the jsongrid and writes it to file, but the problem is it writes the objects on top of each other, as shown above. That does not work because I am not able to pick out a block by name or whatever. I have tried just reading the file, erasing it, adding the [] to it, then writing the JSON to file again, but that just adds lots of [] around, which does not work either. I want to access the data in the JSON like foo[1].Username, for example. What is the best way to achieve this?
Simplest solution is to store an array of JSON objects in your file.
Keep in mind that although JSON stands for "JavaScript Object Notation," an array is also valid JSON. So your file could look like this:
[
    {
        "Username": "ozziep",
        "ProjectID": "ExpressJS",
        "TimeStamp": "2016-12-30T19:54:52.418Z",
        "Comments": "hello world how are we today?"
    },
    {
        "Username": "alex",
        "ProjectID": "Foo",
        "TimeStamp": "2016-12-30T19:55:07.138Z",
        "Comments": "we need to double check that this json system works. "
    }
]
Then, to add, remove, or look up objects in the file, you read the entire file in, parse it, and do what you need.
Adding a comment:
var time = new Date();
var project_id = data.postid;
var comment = data.commentdata;
var usercommented = data.usercoment;

// Read in whole file
fs.readFile("comments.json", 'utf-8', function (err, data) {
    if (err) {
        throw err;
    }
    if (typeof data !== "undefined") {
        // Parse the current contents of the file into an array
        var currentComments = JSON.parse(data);
        // Create our new comment
        var newComment = {
            "Username": usercommented,
            "ProjectID": project_id,
            "TimeStamp": time,
            "Comments": comment
        };
        // Push it onto the end of the existing comments
        currentComments.push(newComment);
        // Convert comments back to a string to be written
        var stringifiedComments = JSON.stringify(currentComments);
        // Write back to file!
        fs.writeFile("comments.json", stringifiedComments, function (err) {
            console.log(err);
        });
    }
});
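Removing or looking up comments works the same way once the file holds a real array. A sketch (the project and user values are just the ones from the example file above):

var fs = require('fs');

fs.readFile("comments.json", 'utf-8', function (err, data) {
    if (err) throw err;
    var comments = JSON.parse(data);

    // Look up every comment for one project.
    var forProject = comments.filter(function (c) { return c.ProjectID === "ExpressJS"; });
    console.log(forProject);

    // Remove all comments by one user and write the remaining ones back, pretty-printed.
    var remaining = comments.filter(function (c) { return c.Username !== "alex"; });
    fs.writeFile("comments.json", JSON.stringify(remaining, null, 4), function (err) {
        if (err) console.log(err);
    });
});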
How can I find data that is related to already known data?
(I'm a newb.)
For example, here is my JSON:
[
    { "id": "1", "log": "1", "pass": "1111" },
    { "id": 2, "log": "2", "pass": "2222" },
    { "id": 3, "log": "3", "pass": "3333" }
]
Now I know that "log" is 1 and I want to find out the "pass" that is related to it.
I've tried to do it like so:
The POST request comes with log and pass data. I search the .json file for the same log value, and if the same data is there, then I search for the related pass.
fs.readFile("file.json", "utf8", function (err, data) {
var jsonFileArr = [];
jsonFileArr = JSON.parse(data); // Parse .json objekts
var log = loginData.log; // The 'log' data that comes with POST request
/* Search through .json file for the same data*/
var gibtLog = jsonFileArr.some(function (obj) {
return obj.log == log;
});
if (gotLog) { // If there is the same 'log'
var pass = loginData.pass; // The 'pass' data that comes with POST request
var gotPass = jsonFileArr.some(function (obj) {
// How to change this part ?
return obj.pass == pass;
});
}
else
console.log("error");
});
The problem is that when I use
var gotPass = jsonFileArr.some(function (obj) {
    return obj.pass == pass;
});
it searches through the whole .json file and not through only one object.
Your main problem is that .some() returns a boolean, whether any of the elements match your predicate or not, but not the element itself.
You want .find() (which will find and return the first element matching the predicate):
const myItem = myArray.find(item => item.log === "1"); // the first matching item
console.log(myItem.pass); // "1111"
Note that it is possible for .find() to not find anything, in which case it returns undefined.
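Applied to the login check, you could guard against that and compare the pass field directly (a sketch, using the jsonFileArr and loginData names from the question):

const myItem = jsonFileArr.find(item => item.log === loginData.log);

if (myItem && myItem.pass === loginData.pass) {
    // log and pass belong to the same object, so the credentials match
    console.log("login ok");
} else {
    console.log("error");
}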
The .some() method returns a boolean that just tells you whether there is at least one item in the array that matches the criteria, it doesn't return the matching item(s). Try .filter() instead:
var jsonFileArr = JSON.parse(data);
var log = loginData.log;
var matchingItems = jsonFileArr.filter(function (obj) {
    return obj.log == log;
});
if (matchingItems.length > 0) { // Was at least 1 found?
    var pass = matchingItems[0].pass; // The 'pass' data that comes with the first match
} else {
    console.log("error"); // no matches
}
Using ES6 Array#find is probably the easiest, but you could also do (among other things)
const x = [{
    "id": "1",
    "log": "1",
    "pass": "1111"
}, {
    "id": 2,
    "log": "2",
    "pass": "2222"
}, {
    "id": 3,
    "log": "3",
    "pass": "3333"
}];

let myItem;
for (let item of x) {
    if (item.log === '1') {
        myItem = item;
        break;
    }
}
console.log(myItem);