I would like to know if there is a way to find the intersection of key-value pairs in an array of objects. Let's say you have an array of three objects which all have the same keys, like this:
arrayOfObj = [
{
"a": 1,
"b": "stringB"
"c": {"c1":1,
"c2": "stringC2"
}
},
{
"a": 1,
"b": "stringBdiff"
"c": {"c1":1,
"c2": "stringC2"
}
},
{
"a": 1,
"b": "stringB"
"c": {"c1":1,
"c2": "stringC2"
}
}
]
I would like to find the common key value pairs of the three objects:
output= [
{"a":1},
{"c": {"c1":1,
"c2":"stringC2"
}
}
]
This is what I have done so far; it works, but not on nested objects. I would like to know if there is a more elegant way to do it, and one that could work on nested objects as well.
let properties;
let commonFound = false;
let notCommonFound = false;
const commonValues = [];
let value;
const initialArray = [{
"a": 2,
"b": "stringB",
"c": {
"c1": 1,
"c2": "stringC2"
}
},
{
"a": 1,
"b": "stringB",
"c": {
"c1": 2,
"c2": "stringC2"
}
},
{
"a": 1,
"b": "stringB",
"c": {
"c1": 2,
"c2": "stringC2"
}
}
];
const commonStorage = [];
const reference = initialArray[0];
properties = Object.keys(reference);
properties.forEach((property) => {
for (let i = 0; i < initialArray.length; i++) {
commonFound = false;
notCommonFound = false;
for (let j = 0; j <i ; j++) {
if (initialArray[i][property] === initialArray[j][property]) {
commonFound = true;
value = initialArray[i][property];
}
else {
notCommonFound = true;
value = [];
}
}
}
if (commonFound && !notCommonFound) {
commonStorage.push({[property] : value});
}
});
console.log(commonStorage);
Before we implement intersect we'll first look at how we expect it to behave –
console.log
( intersect
( { a: 1, b: 2, d: 4 }
, { a: 1, c: 3, d: 5 }
)
// { a: 1 }
, intersect
( [ 1, 2, 3, 4, 6, 7 ]
, [ 1, 2, 3, 5, 6 ]
)
// [ 1, 2, 3, <1 empty item>, 6 ]
, intersect
( [ { a: 1 }, { a: 2 }, { a: 4, b: 5 }, ]
, [ { a: 1 }, { a: 3 }, { a: 4, b: 6 }, ]
)
// [ { a: 1 }, <1 empty item>, { a: 4 } ]
, intersect
( { a: { b: { c: { d: [ 1, 2 ] } } } }
, { a: { b: { c: { d: [ 1, 2, 3 ] } } } }
)
// { a: { b: { c: { d: [ 1, 2 ] } } } }
)
Challenging problems like this one are made easier by breaking them down into smaller parts. To implement intersect we will plan to merge two calls to intersect1, each contributing one side of the computed result –
const intersect = (left = {}, right = {}) =>
merge
( intersect1 (left, right)
, intersect1 (right, left)
)
Implementing intersect1 remains relatively complex due to the need to support both objects and arrays; the sequence of map, filter, and reduce helps maintain the flow of the program –
const intersect1 = (left = {}, right = {}) =>
Object.entries (left)
.map
( ([ k, v ]) =>
// both values are objects
isObject (v) && isObject (right[k])
? [ k, intersect (v, right[k]) ]
// both values are "equal"
: v === right[k]
? [ k, v ]
// otherwise
: [ k, {} ]
)
.filter
( ([ k, v ]) =>
isObject (v)
? Object.keys (v) .length > 0
: true
)
.reduce
( assign
, isArray (left) && isArray (right) ? [] : {}
)
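To make that flow concrete, here is a hand trace (derived from the code above using the first example pair; not part of the original implementation) –
// intersect1 ({ a: 1, b: 2, d: 4 }, { a: 1, c: 3, d: 5 })
// map    → [ [ 'a', 1 ], [ 'b', {} ], [ 'd', {} ] ]   only 'a' has an equal value on both sides
// filter → [ [ 'a', 1 ] ]                             the empty-object placeholders are dropped
// reduce → { a: 1 }                                   entries are assigned onto a fresh {}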
Lastly we implement merge the same way we did in the other Q&A –
const merge = (left = {}, right = {}) =>
Object.entries (right)
.map
( ([ k, v ]) =>
isObject (v) && isObject (left [k])
? [ k, merge (left [k], v) ]
: [ k, v ]
)
.reduce (assign, left)
The final dependencies –
const isObject = x =>
Object (x) === x
const isArray =
Array.isArray
const assign = (o, [ k, v ]) =>
(o [k] = v, o)
Verify the complete program works in your browser below –
const isObject = x =>
Object (x) === x
const isArray =
Array.isArray
const assign = (o, [ k, v ]) =>
(o [k] = v, o)
const merge = (left = {}, right = {}) =>
Object.entries (right)
.map
( ([ k, v ]) =>
isObject (v) && isObject (left [k])
? [ k, merge (left [k], v) ]
: [ k, v ]
)
.reduce (assign, left)
const intersect = (left = {}, right = {}) =>
merge
( intersect1 (left, right)
, intersect1 (right, left)
)
const intersect1 = (left = {}, right = {}) =>
Object.entries (left)
.map
( ([ k, v ]) =>
isObject (v) && isObject (right[k])
? [ k, intersect (v, right[k]) ]
: v === right[k]
? [ k, v ]
: [ k, {} ]
)
.filter
( ([ k, v ]) =>
isObject (v)
? Object.keys (v) .length > 0
: true
)
.reduce
( assign
, isArray (left) && isArray (right) ? [] : {}
)
console.log
( intersect
( { a: 1, b: 2, d: 4 }
, { a: 1, c: 3, d: 5 }
)
// { a: 1 }
, intersect
( [ 1, 2, 3, 4, 6, 7 ]
, [ 1, 2, 3, 5, 6 ]
)
// [ 1, 2, 3, <1 empty item>, 6 ]
, intersect
( [ { a: 1 }, { a: 2 }, { a: 4, b: 5 }, ]
, [ { a: 1 }, { a: 3 }, { a: 4, b: 6 }, ]
)
// [ { a: 1 }, <1 empty item>, { a: 4 } ]
, intersect
( { a: { b: { c: { d: [ 1, 2 ] } } } }
, { a: { b: { c: { d: [ 1, 2, 3 ] } } } }
)
// { a: { b: { c: { d: [ 1, 2 ] } } } }
)
intersectAll
Above, intersect only accepts two inputs, but in your question you want to compute the intersection of two or more objects. We implement intersectAll as follows –
const None =
Symbol ()
const intersectAll = (x = None, ...xs) =>
x === None
? {}
: xs .reduce (intersect, x)
console.log
( intersectAll
( { a: 1, b: 2, c: { d: 3, e: 4 } }
, { a: 1, b: 9, c: { d: 3, e: 4 } }
, { a: 1, b: 2, c: { d: 3, e: 5 } }
)
// { a: 1, c: { d: 3 } }
, intersectAll
( { a: 1 }
, { b: 2 }
, { c: 3 }
)
// {}
, intersectAll
()
// {}
)
Verify the results in your browser –
const isObject = x =>
Object (x) === x
const isArray =
Array.isArray
const assign = (o, [ k, v ]) =>
(o [k] = v, o)
const merge = (left = {}, right = {}) =>
Object.entries (right)
.map
( ([ k, v ]) =>
isObject (v) && isObject (left [k])
? [ k, merge (left [k], v) ]
: [ k, v ]
)
.reduce (assign, left)
const intersect = (left = {}, right = {}) =>
merge
( intersect1 (left, right)
, intersect1 (right, left)
)
const intersect1 = (left = {}, right = {}) =>
Object.entries (left)
.map
( ([ k, v ]) =>
isObject (v) && isObject (right[k])
? [ k, intersect (v, right[k]) ]
: v === right[k]
? [ k, v ]
: [ k, {} ]
)
.filter
( ([ k, v ]) =>
isObject (v)
? Object.keys (v) .length > 0
: true
)
.reduce
( assign
, isArray (left) && isArray (right) ? [] : {}
)
const None =
Symbol ()
const intersectAll = (x = None, ...xs) =>
x === None
? {}
: xs .reduce (intersect, x)
console.log
( intersectAll
( { a: 1, b: 2, c: { d: 3, e: 4 } }
, { a: 1, b: 9, c: { d: 3, e: 4 } }
, { a: 1, b: 2, c: { d: 3, e: 5 } }
)
// { a: 1, c: { d: 3 } }
, intersectAll
( { a: 1 }
, { b: 2 }
, { c: 3 }
)
// {}
, intersectAll
()
// {}
)
remarks
You'll want to consider some things like –
intersect
( { a: someFunc, b: x => x * 2, c: /foo/, d: 1 }
, { a: someFunc, b: x => x * 3, c: /foo/, d: 1 }
)
// { d: 1 } (actual)
// { a: someFunc, c: /foo/, d: 1 } (expected)
We're testing for what's considered equal here in intersect1 –
const intersect1 = (left = {}, right = {}) =>
Object.entries (left)
.map
( ([ k, v ]) =>
isObject (v) && isObject (right[k])
? [ k, intersect (v, right[k]) ]
: v === right[k] // <-- equality?
? [ k, v ]
: [ k, {} ]
)
.filter
( ...
If we want to support things like checking for equality of Functions, RegExps, or other objects, this is where we would make the necessary modifications.
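For instance, a hedged sketch of such a modification (the helper name looseEquals is hypothetical and not part of the code above) could treat two RegExps with the same source and flags, or two functions with the same source text, as equal; intersect1 would then test looseEquals (v, right[k]) instead of v === right[k] –
// hypothetical helper: a more permissive equality test
const looseEquals = (a, b) =>
  a === b
    || ( a instanceof RegExp && b instanceof RegExp
           && a.source === b.source && a.flags === b.flags )
    || ( typeof a === 'function' && typeof b === 'function'
           && String (a) === String (b) )
Comparing functions by their source text is only a heuristic; choose whatever notion of equality fits your program.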
recursive diff
In this related Q&A we compute the recursive diff of two objects
Related
I'm trying to reduce an array and transform it into multiple arrays.
const array = [
{ a: 1, b: 6 },
{ a: 1, b: 5 },
{ a: 1, b: 6 },
{ a: 1, b: 4 },
{ a: 1, b: 5 }
];
var newArray = array.reduce(
(memo, curr) => {
memo.forEach((item, key) => {
const found = item.filter((el) => el.a === curr.a && el.b === curr.b);
if (found.length > 0) return memo[key].push(curr);
else return memo.push([curr]);
});
return memo;
},
[[]]
);
The result I am trying to get is:
[
[
{ a: 1, b: 5 },
{ a: 1, b: 5 }
],
[
{ a: 1, b: 6 },
{ a: 1, b: 6 },
],
[
{ a: 1, b: 4 },
]
];
But as you can see if you try it, because I push onto memo, the loop continues to fire, and the result contains hundreds of arrays.
How am I supposed to limit this loop and get the right result?
Thanks a lot in advance :)
You could use a Map to group the elements by the key of { a, b }, and then get the values of the groups:
const array = [
{ a: 1, b: 6 },
{ a: 1, b: 5 },
{ a: 1, b: 6 },
{ a: 1, b: 4 },
{ a: 1, b: 5 },
];
var newArray = Array.from(
array
.reduce((map, curr) => {
const key = JSON.stringify({ a: curr.a, b: curr.b });
if (!map.has(key)) {
map.set(key, []);
}
map.get(key).push(curr);
return map;
}, new Map())
.values()
);
console.log(newArray);
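One note on the design (an observation, not part of the original answer): JSON.stringify({ a: curr.a, b: curr.b }) serves as a composite Map key, and it groups correctly because the two properties are always serialized in the same order –
// the grouping key is just the serialized { a, b } pair, for example
JSON.stringify({ a: 1, b: 6 }); // '{"a":1,"b":6}'
JSON.stringify({ a: 1, b: 5 }); // '{"a":1,"b":5}'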
Look at your code. You have a triple nested loop, which is insane and definitely not needed to achieve this. Why not use a map?
Here is a function that will do what you want to do with any array of objects given.
const array = [
{ a: 1, b: 6 },
{ a: 1, b: 5 },
{ a: 1, b: 6 },
{ a: 1, b: 4 },
{ a: 1, b: 5 },
];
const separate = (arr) => {
const reduced = arr.reduce((acc, curr) => {
const path = JSON.stringify(curr);
if (!acc[path]) acc[path] = [];
acc[path].push(curr);
return acc;
}, {});
return Object.values(reduced);
};
console.log(separate(array));
If you push inside the forEach loop, it is going to push on every iteration of the reduce function as well.
You can achieve this by adding some local variables, like here:
const array = [
{ a: 1, b: 6 },
{ a: 1, b: 5 },
{ a: 1, b: 6 },
{ a: 1, b: 4 },
{ a: 1, b: 5 }
];
// shift changes the original array:
// it removes and returns the first element
var firstElement = array.shift();
var newArray = array.reduce(
(memo, curr) => {
let isFound = false;
let index = 0;
memo.forEach((item, key) => {
const found = item.filter((el) => el.a === curr.a && el.b === curr.b);
if(found.length > 0){
index = key;
isFound = true;
return;
}
});
if(isFound) {
memo[index].push(curr);
} else {
memo.push([curr]);
}
return memo;
},
[[firstElement]]
);
console.log(newArray);
I have a nested object that looks like this:
let obj = {
F:{
asian: {
"35-44": 1,
"55-": 1,
},
"asian/black": {
"0-24": 1,
"35-44": 1,
"45-54": 2,
},
},
M:{
asian: {
"35-44": 1,
"55-": 1,
},
white: {
"0-24": 1,
"35-44": 1,
"45-54": 2,
},
},
}
And I want to flatten the object to this:
res = {
F: 6,
M: 6,
asian: 4,
"asian/black": 4,
white: 4,
"0-24": 2,
"35-44": 4,
"45-54": 4,
"55-": 2,
}
Every value in res should be the sum of the deepest object values (for F and M) and of the object values with the same key (0-24, 35-44, ...). I feel this can be done using recursion but I just can't get it right. The code I wrote:
let returnVal = 0
const flatten = (obj, prefix = '', res = {}) => {
return Object.entries(obj).reduce((r, [key, val]) => {
if(typeof val === 'object'){
flatten(val, key, r)
} else {
res[key] = val
returnVal = val;
}
if (key in res) {
res[key] += returnVal
} else {
res[key] = 0
res[key] += returnVal
}
return r
}, res)
}
console.log(flatten(obj))
it will output:
result = {
"0-24": 2,
"35-44": 2,
"45-54": 4,
"55-": 2,
F: 2,
M: 2,
asian: 2,
"asian/black": 2,
white: 2,
}
F, M, and some other keys are not correct. Thanks!
Another, perhaps simpler, approach is as follows:
const consolidate = (obj, path = [], results = {}) =>
Object .entries (obj) .reduce ((results, [k, v]) =>
Object (v) === v
? consolidate (v, [...path, k], results)
: [...path, k] .reduce (
(results, n) => ({...results, [n] : (results[n] || 0) + v}),
results
),
results)
const data = {F: {asian: {"35-44": 1, "55-": 1}, "asian/black": {"0-24": 1, "35-44": 1, "45-54": 2}}, M: {asian: {"35-44": 1, "55-": 1}, white: {"0-24": 1, "35-44": 1, "45-54": 2}}}
console .log (consolidate (data))
We recursively track the paths taken through the object, such as ['F', 'asian/black', '45-54'] or ['M', 'white'] or simply ['F'], as well as an object containing the final results. When the value at the current node is an object, we recur, adding the current property name to the path. When it's not (for this data it must therefore be a number), we hit a base case in which we take each node in the current path and update the results object by adding that number to the value stored for that node, or setting it to the current value if no value exists yet.
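As a concrete illustration of that base case, here is a small sketch (the name addLeaf is hypothetical; its body is the inner reduce from consolidate) showing a single leaf value of 1 at the path ['F', 'asian', '35-44'] being added to all three counters –
// hypothetical helper isolating the base case of consolidate
const addLeaf = (results, path, v) =>
  path .reduce (
    (results, n) => ({...results, [n]: (results[n] || 0) + v}),
    results
  )
console .log (addLeaf ({}, ['F', 'asian', '35-44'], 1))
//=> { F: 1, asian: 1, '35-44': 1 }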
There is a potential issue with the default parameters, as described in another Q & A. If someone tried to map the consolidate function directly over an array of input objects, it would fail. If this is a concern, it's easy enough to swap the default parameters for a wrapper function:
const _consolidate = (obj, path, results) =>
Object .entries (obj) .reduce ((results, [k, v]) =>
Object (v) === v
? _consolidate (v, [...path, k], results)
: [...path, k] .reduce (
(results, n) => ({...results, [n] : (results[n] || 0) + v}),
results
),
results)
const consolidate = (obj) =>
_consolidate (obj, [], {})
const data = {
F: {
asian: {
"35-44": 1,
"55-": 1,
},
"asian/black": {
"0-24": 1,
"35-44": 1,
"45-54": 2,
},
},
M: {
asian: {
"35-44": 1,
"55-": 1,
},
white: {
"0-24": 1,
"35-44": 1,
"45-54": 2,
},
},
};
const isObject = obj => Object.prototype.toString.call(obj) === "[object Object]";
// collect the dot-joined key path of every leaf in the object
function nestKeys(obj, parent = "") {
return Object.keys(obj).map(key => {
const k = parent.length ? [parent, key].join(".") : key;
if (!isObject(obj[key])) {
return k;
}
return nestKeys(obj[key], k);
}).flat();
}
// resolve each leaf path to its value and add that value to every key along the path
function flatObj(obj) {
const map = {};
const keys = nestKeys(obj);
keys.forEach(nestedKey => {
const splited = nestedKey.split(".");
const val = splited.reduce((acc, cur) => acc[cur], obj);
splited.forEach(k => {
map[k] = (map[k] || 0) + val;
})
});
return map;
}
console.log(flatObj(data));
Consider this problem:
Create a function zipmap that takes in two sequences, and creates a dictionary from the elements of the first sequence to the elements of the second.
zipmap([1, 2, 3], [4, 5, 6]) => {1: 4, 2: 5, 3: 6}
My solution is below as an answer; can anyone come up with a better way of doing it?
This is already built into Ramda, as zipObj:
console .log (
R.zipObj ([1, 2, 3], [4, 5, 6])
)
<script src="//cdnjs.cloudflare.com/ajax/libs/ramda/0.26.1/ramda.js"></script>
And it's also now a language feature, maybe not yet quite widely enough supported, but getting close: Object.fromEntries.
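For example, a minimal zipmap built on Object.fromEntries might look like this (a sketch, assuming two plain arrays of equal length) –
const zipmap = (keys, values) =>
  Object .fromEntries (keys .map ((k, i) => [k, values [i]]))

console .log (zipmap ([1, 2, 3], [4, 5, 6]))
//=> {1: 4, 2: 5, 3: 6}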
const zipmap = (arr1, arr2) => arr1.reduce((p, c, i) => {
p[c] = arr2[i];
return p;
},{});
Here's a simple recursive implementation -
// None : symbol
const None =
Symbol()
// zipMap : ('k array, 'v array) -> ('k, 'v) object
const zipMap = ([ k = None, ...keys ] = [], [ v = None, ...values ] = []) =>
k === None || v === None
? {}
: { [k]: v, ...zipMap(keys, values) }
console.log(zipMap([ 1, 2, 3 ], [ 4, 5, 6 ]))
// { 1: 4, 2: 5, 3: 6 }
But it's not much of a "mapping" function; it always returns an Object. What if you wanted a different result?
// None : symbol
const None =
Symbol()
// identity : 'a -> 'a
const identity = x =>
x
// zipMap : (('k, 'v) -> ('kk, 'vv), 'k array, 'v array) -> ('kk, 'vv) array
const zipMap =
( f = identity // ('k, v') -> ('kk, 'vv)
, [ k = None, ...keys ] = [] // 'k array
, [ v = None, ...values ] = [] // 'v array
) => // ('kk, 'vv) array
k === None || v === None
? []
: [ f ([ k, v ]), ...zipMap(f, keys, values) ]
// result : (number, number) array
const result =
zipMap
( identity
, [ 1, 2, 3 ]
, [ 4, 5, 6 ]
)
console.log(result)
// [ [ 1, 4 ], [ 2, 5 ], [ 3, 6 ] ]
console.log(Object.fromEntries(result))
// { 1: 4, 2: 5, 3: 6 }
// result2 : (number, number) array
const result2 =
zipMap
( ([ k, v ]) => [ k * 10, v * 100 ]
, [ 1, 2, 3 ]
, [ 4, 5, 6 ]
)
console.log(Object.fromEntries(result2))
// { 10: 400, 20: 500, 30: 600 }
Instead of creating an Object using Object.fromEntries, you could just as easily create a Map too -
// result2 : (number, number) array
const result2 =
zipMap
( ([ k, v ]) => [ k * 10, v * 100 ]
, [ 1, 2, 3 ]
, [ 4, 5, 6 ]
)
// m : (number, number) map
const m =
new Map(result2)
// Map { 10 => 400, 20 => 500, 30 => 600 }
const R = require('ramda')
const zipmapSeparate = (...arr) => arr[0].map((zeroEntry, index) => {
const item = {}
item[arr[0][index]] = arr[1][index]
return item
})
const zipmapReduce = (zipmap1) => zipmap1.reduce((accumulator, current) => {
const key = Object.keys(current)[0]
const value = Object.values(current)[0]
accumulator[key]=value
return accumulator
}, {})
const zipmap = R.compose(zipmapReduce, zipmapSeparate)
console.log(zipmap([1, 2, 3], [4, 5, 6]))
I'm trying to create an updated object from an existing object.
The sample object is:
// sample object
const testObj = {
a: 1,
b: {
c: 2,
d: {
e: 3,
f: {
g: 4
}
}
}
};
I want to create a new object from the above object with some concatenation of each value:
// expected object
const expectedObject= {
a: '1 a',
b: {
c: '2 a',
d: {
e: '3 a',
f: {
g: '4 a'
}
}
}
};
here is my sample code:
let expectedObject = {};
const newObject = object => {
Object.entries(object).forEach(([key, value]) => {
if (typeof value === "object") {
Object.keys(value).map(key => {
value[key] = value[key] + " a";
return value;
});
expectedObject[key] = value;
//return newTest;
} else {
expectedObject[key] = value;
return expectedObject;
}
});
return expectedObject;
};
console.log(newObject(testObj));
the outcome in console is:
{a: 1, b: {…}}
a: 1
b:
c: "2 a"
d: "[object Object] a"
__proto__: Object
__proto__: Object
I wanted to use recursion here and tried it, but no luck.
Any help? Thanks!
You could get a new object by mapping the changed values and creating new objects.
function map(object, fn) {
return Object.fromEntries(Object
.entries(object)
.map(([k, v]) => [k, v && typeof v === 'object' ? map(v, fn) : fn(v)])
);
}
var object = { a: 1, b: { c: 2, d: { e: 3, f: { g: 4 } } } },
result = map(object, v => v + ' a');
console.log(result);
If you have arrays inside, you could add a check in advance and map the values.
const
map = fn => {
const iter = v => v && typeof v === 'object'
? Array.isArray(v)
? v.map(iter)
: Object.fromEntries(Object.entries(v).map(([k, v]) => [k, iter(v, fn)]))
: fn(v);
return iter;
};
var object = { a: 1, b: { c: 2, d: { e: 3, f: { g: 4, k: [5, 6] } } } },
addA = map(v => v + ' a'),
result = addA(object);
console.log(result);
This is simply a refactoring of the answer from user633183. I like that approach a lot, but I think it can be simplified by extracting two more reusable functions. This started as a comment on that answer, but I thought it would be better to be explicit.
const map = (f) => (a) =>
a.map(f)
const mapObj = (f) => (o) =>
Object .entries (o) .reduce ( (a, [k, v] ) => ({ ...a, [k]: f(v) }), {})
const traverse = (f) => (t) =>
Array.isArray(t)
? map (traverse (f)) (t)
: Object(t) === t
? mapObj (traverse (f)) (t)
: f (t)
const input =
{ a: [ 1, 11, 111 ], b: { c: 2, d: { e: [ 3, { f: { g: 4 } } ] } } }
const output =
traverse(x => `${x} a`) (input)
console.log(output)
mapObj can be written in many different ways. Here are two alternatives:
const mapObj = (f = identity) => (o = {}) =>
Object .fromEntries (Object .entries (o) .map (([ k, v ]) => [ k, f (v) ]))
const mapObj = (f = identity) => (o = {}) =>
Object .assign .apply (null, Object .entries (o) .map (([ k, v ]) => ({ [k]: f (v) })))
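A quick check of either variant (identity here is just x => x, supplied because the snippets above only use it as a default parameter) –
const identity = x => x

console .log (mapObj (x => x * 10) ({ a: 1, b: 2 }))
//=> { a: 10, b: 20 }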
Here's an approach using a modification of the original code, to demonstrate what needed to change in order to make it work. You had some things switched up between reading the value and setting the new one. Also, I'm using the spread operator to clone the object before modifying it.
const testObj = {
a: 1,
b: {
c: 2,
d: {
e: 3,
f: {
g: 4
}
}
}
};
const newObject = object => {
const clonedObj = { ...object };
const entries = Object.entries(clonedObj);
entries.forEach(([key, value]) => {
if (typeof value === "object") {
clonedObj[key] = newObject(value);
} else {
clonedObj[key] = value + " a";
}
});
return clonedObj;
};
console.log(newObject(testObj));
console.log(testObj); // prove that the original object hasn't changed
Here's a simple recursive technique. It is similar to Nina's but it preserves arrays, if present in the structure.
1. If the input, t, is an array, create a new array by traversing each array value, v, with the traversing function, f
2. (inductive) Otherwise t is not an array. If t is an object, create a new object from key-value pairs, [ k, v ], by traversing each value, v, with the traversing function, f
3. (inductive) Otherwise t is not an array and t is not an object. This means t is either a primitive value, such as string, number, or null
Numbered comments below correspond to the explanation above -
const identity = x =>
x
const traverse = (f = identity, t = {}) =>
Array.isArray(t) // 1
? Array.from(t, v => traverse(f, v))
: Object(t) === t // 2
? Object.fromEntries(Object.entries(t).map(([ k, v ]) => [ k, traverse(f, v) ]))
: f (t) // 3
const input =
{ a: [ 1, 11, 111 ], b: { c: 2, d: { e: [ 3, { f: { g: 4 } } ] } } }
const output =
traverse(x => `${x} a`, input)
console.log(output)
Here is a solution using object-scan. It works by building the solution at the same time as the input is traversed.
// const objectScan = require('object-scan');
const testObj = { a: 1, b: { c: 2, d: { e: 3, f: { g: 4 } } } };
const cloneAndModify = (obj) => objectScan(['**'], {
breakFn: ({ property, value, isLeaf, context }) => {
if (property === undefined) {
return;
}
const ref = context[context.length - 1];
if (!(property in ref)) {
ref[property] = isLeaf ? `${value} a` : {};
}
context.push(ref[property]);
},
filterFn: ({ context }) => {
context.pop();
}
})(obj, [{}])[0];
const r = cloneAndModify(testObj);
console.log(r);
// => { b: { d: { f: { g: '4 a' }, e: '3 a' }, c: '2 a' }, a: '1 a' }
<script src="https://bundle.run/object-scan#13.7.1"></script>
Disclaimer: I'm the author of object-scan
I'm trying to find an equivalent function to Lodash's merge using Ramda that does a recursive object key-based "merge" or "extend". The behavior is similar to the following:
let merged = R.someMethod(
{ name: 'Matt', address: { street: 'Hawthorne', number: 22, suffix: 'Ave' }},
{ address: { street: 'Pine', number: 33 }}
);
console.log(merged);
// => { name: 'Matt', address: { street: 'Pine', number: 33, suffix: 'Ave' }}
I noticed in the following pull request that R.set was briefly introduced, but then rolled back soon thereafter. Has this functionality been captured by the Ramda library since?
Is this functionality available in Ramda?
A relatively simple recursive function can be created using R.mergeWith.
function deepMerge(a, b) {
return (R.is(Object, a) && R.is(Object, b)) ? R.mergeWith(deepMerge, a, b) : b;
}
deepMerge({ name: 'Matt', address: { street: 'Hawthorne', number: 22, suffix: 'Ave' }},
{ address: { street: 'Pine', number: 33 }});
//=> {"address": {"number": 33, "street": "Pine", "suffix": "Ave"}, "name": "Matt"}
Ramda does not include such a function at the moment.
There have been several attempts to create one, but they seem to founder on the notion of what's really required of such a function.
Feel free to raise an issue if you think it's worth adding.
Update
(Two years later.) This was eventually added, in the form of several functions: mergeDeepLeft, mergeDeepRight, mergeDeepWith, and mergeDeepWithKey.
Ramda now has several merge functions: mergeDeepLeft, mergeDeepRight, mergeDeepWith, mergeDeepWithKey.
const { unapply, mergeDeepRight, reduce } = R
const mergeDeepRightAll = unapply(reduce(mergeDeepRight, {}))
console.log(mergeDeepRightAll({a:1, b: {c: 1}},{a:2, d: {f: 2}},{a:3, b: {c:3}}))
<script src="https://cdnjs.cloudflare.com/ajax/libs/ramda/0.25.0/ramda.min.js"></script>
from scratch
Newer functions in the Ramda library mean you don't have to do this on your own, but what if the maintainers never got around to it? You don't want to be stuck waiting on someone else to write your code when you need a feature or behavior right now.
Below, we implement our own recursive merge –
const isObject = x =>
Object (x) === x
const merge = (left = {}, right = {}) =>
Object.entries (right)
.reduce
( (acc, [ k, v ]) =>
isObject (v) && isObject (left [k])
? { ...acc, [k]: merge (left [k], v) }
: { ...acc, [k]: v }
, left
)
Our merge function also works generically and accepts any two objects as input.
const x =
{ a: 1, b: 1, c: 1 }
const y =
{ b: 2, d: 2 }
console.log (merge (x, y))
// { a: 1, b: 2, c: 1, d: 2 }
In the event each object contains a property whose value is also an object, merge will recur and merge the nested objects as well.
const x =
{ a: { b: { c: 1, d: 1 } } }
const y =
{ a: { b: { c: 2, e: 2 } }, f: 2 }
console.log (merge (x, y))
// { a: { b: { c: 2, d: 1, e: 2 } }, f: 2 }
arrays are people too
To support arrays in merge, we introduce a mutation helper mut which assigns a [ key, value ] pair to a given object, o. Arrays are considered objects too, so we can update both arrays and objects using the same mut function
Note, Ramda's merging functions do not attempt to merge arrays. The primary advantage to writing your own functions is you can easily augment their behavior to meet your program's ever-evolving requirements.
const mut = (o, [ k, v ]) =>
(o [k] = v, o)
const merge = (left = {}, right = {}) =>
Object.entries (right)
.map
( ([ k, v ]) =>
isObject (v) && isObject (left [k])
? [ k, merge (left [k], v) ]
: [ k, v ]
)
.reduce (mut, left)
Shallow merges work as expected
const x =
[ 1, 2, 3, 4, 5 ]
const y =
[ 0, 0, 0 ]
const z =
[ , , , , , 6 ]
console.log (merge (x, y))
// [ 0, 0, 0, 4, 5 ]
console.log (merge (y, z))
// [ 0, 0, 0, <2 empty items>, 6 ]
console.log (merge (x, z))
// [ 1, 2, 3, 4, 5, 6 ]
And deep merges too
const x =
{ a: [ { b: 1 }, { c: 1 } ] }
const y =
{ a: [ { d: 2 }, { c: 2 }, { e: 2 } ] }
console.log (merge (x, y))
// { a: [ { b: 1, d: 2 }, { c: 2 }, { e: 2 } ] }
variadic merge
Maybe we want a merge function that is not limited to two inputs: mergeAll –
const Empty =
{}
const mergeAll = (first = Empty, ...rest) =>
first === Empty
? first
: merge (first, mergeAll (...rest))
mergeAll ({ a: 1 }, { b: 2 }, { c: 3 })
// { a: 1, b: 2, c: 3 }
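An equivalent sketch folds the inputs with reduce over the merge defined above, seeding each call with a fresh empty object –
const mergeAll = (...objects) =>
  objects .reduce (merge, {})

mergeAll ({ a: 1 }, { b: 2 }, { c: 3 })
// { a: 1, b: 2, c: 3 }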
This answer is an excerpt from another question: How to compare two objects and get key-value pairs of their differences?