I would like to merge objects that share the same properties into an array.
Take this as an example:
object1 = {"id":1,
"name":name1,
"children":[{"id":2,"name":name2}]
};
object2 = {"id":3,
"name":name3,
"children":[{"id":4,"name":name4}]
};
object3 = {"id":1,
"name":name1,
"children":[{"id":6,"name":name6}]
};
var result = Object.assign(result,object1,object2,object3);
Expected result:
JSON.stringify(result) = [
{"id":1,
"name":"name1",
"children":[{"id":2,"name":"name2"},
{"id":6,"name":"name6"}]
},
{"id":3,
"name":"name3",
"children":[{"id":4,"name":"name4"}]
}
]
Actual result:
JSON.stringify([result]) = [
{"id":3,
"name":"name3",
"children":[{"id":4,"name":"name4"}]
}
]
It seems like Object.assign() isn't the way to go, as it overwrites. I do not want it to overwrite; I want the objects to merge instead. Is there a right way to do this?
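For context, Object.assign does a shallow, last-one-wins copy of own enumerable properties, so a later source's children array simply replaces the earlier one instead of being merged, e.g.:
// Later sources overwrite earlier ones property by property:
var demo = Object.assign({}, { id: 1, children: [{ id: 2 }] },
                             { id: 1, children: [{ id: 6 }] });
console.log(JSON.stringify(demo)); // {"id":1,"children":[{"id":6}]}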
As so often, Array.prototype.reduce provides a good base for an approach like this one ...
var obj1 = {
"id": 1,
"name": "name1",
"children": [{ "id": 2, "name": "name2" }]
};
var obj2 = {
"id": 3,
"name": "name3",
"children": [{ "id": 4, "name": "name4" }]
};
var obj3 = {
"id": 1,
"name": "name1",
"children": [{ "id": 6, "name": "name6" }]
};
// Expected result: [{
// "id": 1,
// "name": name1,
// "children": [
// { "id": 2, "name": "name2" },
// { "id": 6, "name": "name6" }
// ]
// }, {
// "id": 3,
// "name": "name3",
// "children": [{"id": 4, "name": "name4" }]
// }]
function mergeEquallyLabeledTypes(collector, type) {
var key = (type.name + '#' + type.id); // identity key.
var store = collector.store;
var storedType = store[key];
if (storedType) { // merge `children` of identically named types.
storedType.children = storedType.children.concat(type.children);
} else {
store[key] = type;
collector.list.push(type);
}
return collector;
}
var result = [obj1, obj2, obj3].reduce(mergeEquallyLabeledTypes, {
store: {},
list: []
}).list;
console.log('result : ', result);
Edit Note
After being informed about changed requirements that need to handle a nested pattern, I will change my first approach into a generic solution. This is not that difficult, since there is a repeated pattern within the data structure; I just need to make the already existing reducer function self-recursive. A recursion step is triggered after a complete reducing cycle has finished on any provided list ...
var obj1 = {
"id": 1,
"name": "name1",
"children": [{ "id": 2, "name": "name2", "children": [{ "id": 8, "name": "name8" }] }]
};
var obj2 = {
"id": 3,
"name": "name3",
"children": [{ "id": 4, "name": "name4", "children": [{ "id": 9, "name": "name9" }] }]
};
var obj3 = {
"id": 1,
"name": "name1",
"children": [{ "id": 6, "name": "name6", "children": [{ "id": 10, "name": "name10" }] }]
};
var obj4 = {
"id": 3,
"name": "name3",
"children": [{ "id": 4, "name": "name4", "children": [{ "id": 11, "name": "name11" }] }]
};
function mergeEquallyLabeledTypesRecursively(collector, type, idx, list) {
var key = (type.name + '#' + type.id); // identity key.
var store = collector.store;
var storedType = store[key];
if (storedType) { // merge `children` of identically named types.
storedType.children = storedType.children.concat(type.children);
} else {
store[key] = type;
collector.list.push(type);
}
// take repetitive data patterns into account ...
if (idx >= (list.length - 1)) {
collector.list.forEach(function (type) {
// ... behave recursive, when appropriate.
if (type.children) {
type.children = type.children.reduce(mergeEquallyLabeledTypesRecursively, {
store: {},
list: []
}).list;
}
});
}
return collector;
}
var result = [obj1, obj2, obj3, obj4].reduce(mergeEquallyLabeledTypesRecursively, {
store: {},
list: []
}).list;
console.log('result : ', result);
This might be what you're after. Note that it is now recursive, although your example data doesn't appear to need that anyway.
const object1 = {"id":1,
"name":"name1",
"children":[{"id":2,"name":"name2"}]
};
const object2 = {"id":3,
"name":"name3",
"children":[{"id":4,"name":"name4"}]
};
const object3 = {"id":1,
"name":"name1",
"children":[
{"id":6,"name":"name6"},
{"id":7,"name":"name7"},
{"id":6,"name":"name6"}
]
};
function merge(arr) {
const idLinks = {};
const ret = [];
arr.forEach((r) => {
if (!idLinks[r.id]) idLinks[r.id] = [];
idLinks[r.id].push(r);
});
Object.keys(idLinks).forEach((k) => {
const nn = idLinks[k];
const n = nn[0];
for (let l = 1; l < nn.length; l ++) {
if (nn[l].children) {
if (!n.children) n.children = [];
n.children = n.children.concat(nn[l].children);
}
}
if (n.children && n.children.length) n.children = merge(n.children);
ret.push(n);
});
return ret;
}
var result = merge([object1,object2,object3]);
console.log(result);
/* There are two cases:
a) No duplicate children
b) Duplicate children, either within the same object, across different objects, or both
*/
/* =============== */
/* Case a) */
const util = require('util');
var object1 = {
"id": 1,
"name": "name1",
"children": [{ "id": 2, "name": "name2" }]
};
var object2 = {
"id": 3,
"name": "name3",
"children": [{ "id": 4, "name": "name4" }]
};
var object3 = {
"id": 1,
"name":"name1",
"children":[{"id":6,"name":"name6"}]
};
var arr = [object1,object2,object3];
var uniqueIds = [];
var filteredArray = [];
var uniqueId='';
arr.forEach((item, i, array) => {
uniqueId = uniqueIds.indexOf(item.id);
uniqueId = uniqueId + 1;
uniqueIds = [...uniqueIds, item.id];
if(!uniqueId){
filteredArray[i] = item;
}
if(uniqueId){
filteredArray[uniqueId-1]['children'] = [...(array[uniqueId-1].children),...(item.children)];
}
});
console.log(util.inspect(filteredArray,false,null));
/* ============================================
Case b)
Dealing with the worst case of having duplicate children in both same
and different objects
*/
object1 = {"id":1,
"name":'name1',
"children":[{"id":2,"name":'name2'},
{"id":2,"name":'name2'}]
};
object2 = {"id":3,
"name":'name3',
"children":[{"id":4,"name":'name4'}]
};
object3 = {"id":1,
"name":'name1',
"children":[{"id":6,"name":'name6'},
{"id":7,"name":'name7'},
{"id":2,"name":'name2'}]
};
arr = [object1,object2,object3];
uniqueIds = [];
uniqueId = '';
filteredArray = [];
arr.forEach((item, i, array) => {
uniqueId = uniqueIds.indexOf(item.id);
uniqueId = uniqueId + 1;
uniqueIds = [...uniqueIds, item.id];
if(!uniqueId){
filteredArray[i] = item;
}
if(uniqueId){
filteredArray[uniqueId-1]['children'] = [...(array[uniqueId-1].children),...(item.children)];
}
/*Removing duplicate children entries*/
filteredArray[uniqueIds.indexOf(item.id)]['children'] = filteredArray[uniqueIds.indexOf(item.id)]['children']
.filter((elem, index, self) => self.findIndex((t) => {return t.id === elem.id}) === index)
})
console.log(util.inspect(filteredArray,false,null));
Here is a functional-programming approach using ES6 features. I am assuming the children arrays may also contain duplicates. I enclosed the code in a closure.
See the following link for why I used util to print the full objects with Node's console.log():
How can I get the full object in Node.js's console.log(), rather than '[Object]'?
(function() {
'use strict';
const util = require('util');
/** string constants */
const ID = 'id';
const CHILDREN = 'children';
/* Objects to modify */
const object1 = {
"id": 1,
"name": "name1",
"children": [
{ "id": 2, "name": "name2" },
{ "id": 5, "name": "name5" },
{ "id": 7, "name": "name7" }
]
};
const object2 = {
"id": 3,
"name": "name3",
"children": [
{ "id": 4, "name": "name4" }
]
};
const object3 = {
"id": 1,
"name": "name1",
"children": [
{ "id": 5, "name": "name5" },
{ "id": 6, "name": "name6" }
]
};
/**
* Concatenates the arrays
* @param { array } - a
* @param { array } - b
*/
const merge = (a, b) => {
return a.concat(b);
};
/**
* Removes duplicates from the given array based on ID
* @param { array } - array to remove duplicates from
* @return { array } - array without duplicates
*/
const removeDuplicates = (arr) => {
return arr.filter((obj, pos, arr) => {
return arr.map((m) => {
return m[ID];
}).indexOf(obj[ID]) === pos;
});
}
/**
* Groups items in an array by a particular key
* Currying technique
* @param { prop } - key to group by
* @return { () => {} } - method which in turn takes the array as argument
*/
const groupBy = (prop) => (array) => {
return array.reduce((groups, item) => {
const val = item[prop];
groups[val] = groups[val] || [];
groups[val].push(item);
return groups;
}, {});
}
/**
* Object containing the items grouped by a particular key
*/
const grouped = groupBy(ID)([object1, object2, object3]);
/**
* Removing duplicate children within each group
* (the first object of each group is mutated in place)
*/
Object.keys(grouped).forEach((key) => {
grouped[key].reduce((a, b) => {
a[CHILDREN] = removeDuplicates(a[CHILDREN].concat(b[CHILDREN]));
return a;
});
});
/**
* Desired final output
*/
const final = Object.keys(grouped)
.map((key) => removeDuplicates(grouped[key]))
.reduce(merge, []);
console.log(util.inspect(final, false, null));
})();
const object1 = {
"id":1,
"name":"name1",
"children":[{"id":2,"name":"name2"}]
};
const object2 = {
"id":3,
"name":"name3",
"children":[{"id":4,"name":"name4"}]
};
const object3 = {
"id":1,
"name":"name1",
"children":[{"id":6,"name":"name6"}]
};
var array = [object1,object2,object3];
var array2 = [object1,object2,object3];
function uniquearray(obj){
var result = [];
for(var i = 0; i < array.length; i++){
if(obj.id == array[i].id){
result.push(array[i]);
array.splice(i, 1);
i--; // stay on the same index after removing an element
}
}
return result;
}
var arrayofuniarrays = []
for(var i=0;i<array2.length;i++){
arrayofuniarrays.push(uniquearray(array2[i]))
}
for(var i = 0; i < arrayofuniarrays.length; i++){
for(var j = 1; j < arrayofuniarrays[i].length; j++){
arrayofuniarrays[i][0].children.push(...arrayofuniarrays[i][j].children);
}
arrayofuniarrays[i].splice(1); // keep only the merged first entry
}
var result = arrayofuniarrays.reduce(function(a, b){ return a.concat(b); }, []);
console.log(result);
Here is a sketch example of how to do this. It leverages a mapped type using your id as a key to ensure each item only appears once. It adds all of the children to an array based on the id.
If you needed to enforce the same behaviour on the children, you could use the same technique (a sketch of that follows below the code).
I have split this into multiple iterations to show you the individual parts in play.
Usually, it is more efficient to avoid creating objects that need to be zipped back up if you can.
const object1 = {
"id": 1,
"name": "name1",
"children": [{ "id": 2, "name": "name2" }]
};
const object2 = {
"id": 3,
"name": "name3",
"children": [{ "id": 4, "name": "name4" }]
};
const object3 = {
"id": 1,
"name":"name1",
"children":[{"id":6,"name":"name6"}]
};
const all = [object1, object2, object3];
// Use a map like a dictionary to enforce unique keys
const mapped = {};
for (let obj of all) {
if (!mapped[obj.id]) {
mapped[obj.id] = obj;
continue;
}
mapped[obj.id].children.push(...obj.children);
}
console.log('Mapped ==> '+JSON.stringify(mapped));
// If you want to convert the mapped type to an array
const result = [];
for (let key in mapped) {
result.push(mapped[key]);
}
console.log('Array ==> '+JSON.stringify(result));
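For the children, a minimal sketch of the same dictionary technique applied one level down could look like this (my assumption being that children should also be unique by id; it builds on the mapped object above):
// Sketch: deduplicate/merge children by id using the same dictionary idea
for (let key in mapped) {
  const childMap = {};
  for (let child of mapped[key].children) {
    if (!childMap[child.id]) childMap[child.id] = child; // keep the first child seen per id
  }
  mapped[key].children = Object.keys(childMap).map(id => childMap[id]);
}
console.log('Deduped children ==> ' + JSON.stringify(mapped));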
Building on @Peter Seliger's answer here, I derived the following method to merge arrays with deeply nested children.
Given the following objects:
var obj1 = {
"id": 1,
"name": "name1",
"children": [{ "id": 2, "name": "name2", children:[{ "id":8, "name": "name8" }] }]
};
var obj2 = {
"id": 3,
"name": "name3",
"children": [{ "id": 4, "name": "name4", children:[{ "id":9, "name": "name9" }] }]
};
var obj3 = {
"id": 1,
"name": "name1",
"children": [{ "id": 6, "name": "name6", children:[{ "id":10, "name": "name10" }] }]
};
var obj4 = {
"id": 3,
"name": "name3",
"children": [{ "id": 4, "name": "name4", children:[{ "id":11, "name": "name11" }] }]
};
First, we merge the parents:
function mergeEquallyLabeledTypes(collector, type) {
var key = (type.name + '#' + type.id); // identity key.
var store = collector.store;
var storedType = store[key];
if (storedType) { // merge `children` of identically named types.
if(storedType.children)
storedType.children = storedType.children.concat(type.children);
} else {
store[key] = type;
collector.list.push(type);
}
return collector;
}
var result = [obj1, obj2, obj3, obj4].reduce(mergeEquallyLabeledTypes, {
store: {},
list: []
}).list;
Then we merge the children and subchildren if any.
for(let i=0; i<result.length; i++){
var children = result[i].children;
if(children){
var reducedChildren = children.reduce(mergeEquallyLabeledTypes, {store: {}, list: []}).list;
for(let j=0; j<reducedChildren.length; j++){
var subchildren = reducedChildren[j].children;
if(subchildren){
var reducedSubchildren = subchildren.reduce(mergeEquallyLabeledTypes, {store: {}, list: []}).list;
reducedChildren[j].children = reducedSubchildren;
}
}
result[i].children = reducedChildren;
}
}
Finally, the result is what I'll use on my website.
console.log('result : ', result);
I am able to get the expected result.
// result: [{
// "id": 1,
// "name": name1,
// "children": [
// { "id": 2, "name": "name2", children:[{ "id":8, "name": "name8" }] },
// { "id": 6, "name": "name6", children:[{ "id":10, "name": "name10" }] }
// ]
// }, {
// "id": 3,
// "name": "name3",
// "children": [{"id": 4, "name": "name4", children:[
// { "id":9, "name": "name9" },
// { "id":11, "name": "name11" }
// ]
// }
// ]
// }]
However, this might not be very efficient, as I'll need to keep extending the children/subchildren merging code if my tree gets nested with more levels (e.g. subsubchildren, subsubsubchildren, and so on...).
Is there a more efficient way to do this?
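One possible generalisation, sketched here only (it is essentially the same idea as the recursive reducer in the earlier answer): wrap the per-level reduction in a small helper that calls itself, reusing mergeEquallyLabeledTypes from above, so any nesting depth is handled without adding another loop per level.
function mergeTree(list) {
    // reduce one level, then recurse into each merged node's children
    var merged = list.reduce(mergeEquallyLabeledTypes, { store: {}, list: [] }).list;
    merged.forEach(function (node) {
        if (node.children) {
            node.children = mergeTree(node.children);
        }
    });
    return merged;
}
var result = mergeTree([obj1, obj2, obj3, obj4]);
console.log('result : ', result);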
const object1 = {
id:1,
name:'a',
}
const object2 = {
id:3,
name:'b',
}
const object3 = {
id:1,
name:'c',
}
const originArr = [object1, object2, object3]
const idArr = [object1.id, object2.id, object3.id]
const newIdArr = []
for (let id of idArr) {
if (newIdArr.indexOf(id) === -1) newIdArr.push(id)
}
const result = newIdArr.map(id => {
let names = []
for (const obj of originArr) {
if (id === obj.id) names.push(obj.name)
}
return { id, names }
})
console.log(result)
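With the three example objects above, this should log something like [ { id: 1, names: [ 'a', 'c' ] }, { id: 3, names: [ 'b' ] } ].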
I have a very simple JSON, but each block looks something like this:
var json = {
"name": "blabla",
"Children": [{
"name": "something",
"Children": [{ ..... }]
}]
};
And so on. I don't know how many levels of children are nested inside each other.
var keys = Object.keys(json);
for (var j = 0; j < keys.length; j++) {
var key = keys[j];
var value = json[key];
delete json[key];
key = key.replace("Children", "children");
json[key] = value;
}
Now I want to replace all "Children" keys with lowercase "children". The code above only works for the first depth. How can I do this recursively?
It looks like the input structure is pretty well-defined, so you could simply create a recursive function like this:
function transform(node) {
return {
name: node.name,
children: node.Children.map(transform)
};
}
var json = {
"name": "a",
"Children": [{
"name": "b",
"Children": [{
"name": "c",
"Children": []
}, {
"name": "d",
"Children": []
}]
}, {
"name": "e",
"Children": []
}]
};
console.log(transform(json));
A possible solution:
var s = JSON.stringify(json);
var t = s.replace(/"Children"/g, '"children"');
var newJson = JSON.parse(t);
Pros: This solution is very simple, being just three lines.
Cons: There is a potential unwanted side-effect, consider:
var json = {
"name": "blabla",
"Children": [{
"name": "something",
"Children": [{ ..... }]
}],
"favouriteWords": ["Children","Pets","Cakes"]
}
The solution replaces all instances of "Children", so the entry in the favouriteWords array would also be replaced, despite not being a property name. If there is no chance of the word appearing anywhere else other than as the property name, then this is not an issue, but worth raising just in case.
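If that risk matters, one alternative sketch (my addition, assuming only plain objects ever carry a Children property) is to do the rename in a JSON.parse reviver; because the rename is based on actual object keys rather than text matching, string values such as "Children" inside favouriteWords are left untouched:
var newJson = JSON.parse(JSON.stringify(json), function (key, value) {
    // the reviver visits every parsed value bottom-up; rename only on plain objects
    if (value && typeof value === 'object' && !Array.isArray(value) && 'Children' in value) {
        value.children = value.Children;
        delete value.Children;
    }
    return value;
});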
Here is a function that can do it recursively:
function convertKey(obj) {
for (objKey in obj)
{
if (Array.isArray(obj[objKey])) {
obj[objKey].forEach(x => {
convertKey(x);
});
}
if (objKey === "Children") {
obj.children = obj.Children;
delete obj.Children;
}
}
}
And here is a more generic way for doing this:
function convertKey(obj, oldKey, newKey) {
for (objKey in obj)
{
if (Array.isArray(obj[objKey])) {
obj[objKey].forEach(objInArr => {
convertKey(objInArr, oldKey, newKey);
});
}
if (objKey === oldKey) {
obj[newKey] = obj[oldKey];
delete obj[oldKey];
}
}
}
convertKey(json, "Children", "children");
Both the accepted answer and @Tamas' answer have slight issues.
With @Bardy's answer, as he points out, there is the issue that if any of your values contained the word "Children" it would cause problems.
With @Tamas' answer, one issue is that any properties apart from name and children get dropped. It also assumes a Children property exists. And what if the property is already children rather than Children?
Using a slightly modified version of @Tamas' answer, this should avoid the pitfalls.
function transform(node) {
if (node.Children) node.children = node.Children;
if (node.children) node.children = node.children.map(transform);
delete node.Children;
return node;
}
var json = {
"name": "a",
"Children": [{
"age": 13,
"name": "b",
"Children": [{
"name": "Mr Bob Chilren",
"Children": []
}, {
"name": "d",
"age": 33, //other props keep
"children": [{
"name": "already lowecased",
"age": 44,
"Children": [{
"name": "now back to upercased",
"age": 99
}]
}] // what if they were already lowercased?
}]
}, {
"name": "e",
//"Children": [] //what if we have no children
}]
};
console.log(transform(json));
Sorry if this has been asked before. I have a JSON structure like:
{"data":[
{"Date":"03/04/2016","Key":"A","Values":"123"},
{"Date":"04/04/2016","Key":"A","Values":"456"},
{"Date":"03/04/2016","Key":"B","Values":"789"},
{"Date":"04/04/2016","Key":"B","Values":"012"}
]}
I want to change this to a different format which is grouped by Key and combines the rest of the fields in Values:
{"Result":[
{
"Key":"A"
"Values":[["03/04/2016","123"], ["04/04/2016","456"]]
},
{"Key":"B"
"Values":[["03/04/2016","789"]},["04/04/2016","012"]]
}
]}
I want to do this in JavaScript/HTML.
You could iterate and build a new object if one does not exist yet.
var object = { "data": [{ "Date": "03/04/2016", "Key": "A", "Values": "123" }, { "Date": "04/04/2016", "Key": "A", "Values": "456" }, { "Date": "03/04/2016", "Key": "B", "Values": "789" }, { "Date": "04/04/2016", "Key": "B", "Values": "012" }], result: [] };
object.data.forEach(function (a) {
if (!this[a.Key]) {
this[a.Key] = { Key: a.Key, Values: [] };
object.result.push(this[a.Key]);
}
this[a.Key].Values.push([a.Date, a.Values]);
}, Object.create(null));
console.log(object);
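Note: Object.create(null) is passed as forEach's second argument (thisArg) and serves as a prototype-free lookup table inside the callback; that is why a regular function expression, rather than an arrow function (which would ignore thisArg), is used.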
I think this can be a better answer (though Nina's answer matches your problem statement exactly) if the items of the data array have different properties and you don't want to change the input data.
var raw = {"data":[
{"Date":"03/04/2016","Key":"A","Values":"123"},
{"Date":"04/04/2016","Key":"A","Values":"456"},
{"Date":"03/04/2016","Key":"B","Values":"789"},
{"Date":"04/04/2016","Key":"B","Values":"012"}
]};
var result = new Map;
raw.data.forEach(function (entry) {
var key = entry.Key;
if (this[key])
return this[key].push(getClonedData(entry));
this[key] = [getClonedData(entry)];
result.set(key, {
Key: key,
Values: this[key]
})
}, Object.create(null));
var filtered = {
result: [...result.values()]
}
console.log(filtered);
function getClonedData(entry) {
const data = Object.assign({}, entry);
delete data.Key;
return data;
}