Convert bool to number. Better Algorithm? [closed] - javascript

Recently, I interviewed in one company and received a test task:
Implement a function that accepts any data type and converts any boolean values (if any) to numeric values.
Objects of any nesting level, arrays, strings, numbers, etc. can be used as parameters.
Example:
booleanToInt('qwerty') // 'qwerty'
booleanToInt(1) // 1
booleanToInt(false) // 0
booleanToInt(true) // 1
booleanToInt([1, 'qwerty', false]) // [1, 'qwerty', 0]
booleanToInt([1, 'qwerty', { a: true }]) // [1, 'qwerty', { a: 1 }]
booleanToInt({ a: { b: true }, c: false, d: 'qwerty' }) // { a: { b: 1 }, c: 0, d: 'qwerty' }
I wrote my own implementation, but I was told that it was too complicated and too long. When I asked what and where I could optimize, I was not given an answer.
My solution:
// Checking for an object
function isObject(value) {
  return typeof value === "object" && !Array.isArray(value) && value !== null;
}

// Transforming an object
function transformObject(obj) {
  Object.keys(obj).forEach((key) => {
    if (isObject(obj[key])) {
      transformObject(obj[key]);
    } else if (Array.isArray(obj[key])) {
      transformArray(obj[key]);
    } else {
      obj[key] = transformStatic(obj[key]);
    }
  });
}

// Transforming the array
function transformArray(list) {
  list.forEach((item, i) => {
    if (isObject(item)) {
      transformObject(item);
    } else if (Array.isArray(item)) {
      transformArray(item);
    } else {
      list[i] = transformStatic(item);
    }
  });
}

// Transforming static types
function transformStatic(value) {
  let res = value;
  if (typeof value === "boolean") {
    res = +value;
  }
  return res;
}

// Main function
function booleanToInt(value) {
  if (isObject(value)) {
    transformObject(value);
    return value;
  }
  if (Array.isArray(value)) {
    transformArray(value);
    return value;
  }
  return transformStatic(value);
}
console.log(booleanToInt("qwerty")); // 'qwerty'
console.log(booleanToInt(1)); // 1
console.log(booleanToInt(false)); // 0
console.log(booleanToInt(true)); // 1
console.log(booleanToInt([1, "qwerty", false])); // [1, 'qwerty', 0]
console.log(booleanToInt([1, "qwerty", { a: true }])); //[1, 'qwerty', { a: 1 }]
I wonder what other algorithms people can come up with.
Perhaps something shorter and more concise?

I'd simplify it to a single function and use for ... in:
function booleanToInt(value) {
  if (typeof value === "boolean") return +value;
  if (typeof value === "object") for (const k in value) value[k] = booleanToInt(value[k]);
  return value;
}
console.log(booleanToInt("qwerty")); // 'qwerty'
console.log(booleanToInt(1)); // 1
console.log(booleanToInt(false)); // 0
console.log(booleanToInt(true)); // 1
console.log(booleanToInt([1, "qwerty", false])); // [1, 'qwerty', 0]
console.log(booleanToInt([1, "qwerty", { a: true }])); //[1, 'qwerty', { a: 1 }]
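If mutating the input is a concern (the for ... in version above rewrites the original object or array in place), a non-mutating variant along the same lines could look like this; booleanToIntCopy is just an illustrative name, not part of the original answers:

// Sketch of a non-mutating variant: returns a new structure instead of editing the input.
function booleanToIntCopy(value) {
  if (typeof value === "boolean") return +value;
  if (Array.isArray(value)) return value.map(booleanToIntCopy);
  if (value !== null && typeof value === "object") {
    return Object.fromEntries(
      Object.entries(value).map(([k, v]) => [k, booleanToIntCopy(v)])
    );
  }
  return value;
}

console.log(booleanToIntCopy({ a: { b: true }, c: false, d: "qwerty" }));
// { a: { b: 1 }, c: 0, d: 'qwerty' }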

Related

How to find difference between two Object and get those changes merge in single Object in JavaScript? [duplicate]

Both Object.assign and Object spread only do a shallow merge.
An example of the problem:
// No object nesting
const x = { a: 1 }
const y = { b: 1 }
const z = { ...x, ...y } // { a: 1, b: 1 }
The output is what you'd expect. However if I try this:
// Object nesting
const x = { a: { a: 1 } }
const y = { a: { b: 1 } }
const z = { ...x, ...y } // { a: { b: 1 } }
Instead of
{ a: { a: 1, b: 1 } }
you get
{ a: { b: 1 } }
x is completely overwritten because the spread syntax only goes one level deep. This is the same with Object.assign().
Is there a way to do this?
I know this is a bit of an old issue, but the easiest solution in ES2015/ES6 I could come up with was actually quite simple, using Object.assign().
Hopefully this helps:
/**
 * Simple object check.
 * @param item
 * @returns {boolean}
 */
export function isObject(item) {
  return (item && typeof item === 'object' && !Array.isArray(item));
}

/**
 * Deep merge two objects.
 * @param target
 * @param ...sources
 */
export function mergeDeep(target, ...sources) {
  if (!sources.length) return target;
  const source = sources.shift();

  if (isObject(target) && isObject(source)) {
    for (const key in source) {
      if (isObject(source[key])) {
        if (!target[key]) Object.assign(target, { [key]: {} });
        mergeDeep(target[key], source[key]);
      } else {
        Object.assign(target, { [key]: source[key] });
      }
    }
  }

  return mergeDeep(target, ...sources);
}
Example usage:
mergeDeep(this, { a: { b: { c: 123 } } });
// or
const merged = mergeDeep({a: 1}, { b : { c: { d: { e: 12345}}}});
console.dir(merged); // { a: 1, b: { c: { d: [Object] } } }
You'll find an immutable version of this in the answer below.
Note that this will lead to infinite recursion on circular references. There are some great answers here on how to detect circular references if you think you'd face this issue.
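As a rough sketch of that guard (my addition, not part of the original answer; mergeDeepSafe is a hypothetical name), a WeakSet of already-visited source objects stops the recursion:

function mergeDeepSafe(target, source, seen = new WeakSet()) {
  if (isObject(target) && isObject(source)) {
    if (seen.has(source)) return target; // already visited this source object: stop recursing
    seen.add(source);
    for (const key in source) {
      if (isObject(source[key])) {
        if (!target[key]) Object.assign(target, { [key]: {} });
        mergeDeepSafe(target[key], source[key], seen);
      } else {
        Object.assign(target, { [key]: source[key] });
      }
    }
  }
  return target;
}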
You can use Lodash merge:
var object = {
  'a': [{ 'b': 2 }, { 'd': 4 }]
};

var other = {
  'a': [{ 'c': 3 }, { 'e': 5 }]
};

console.log(_.merge(object, other));
// => { 'a': [{ 'b': 2, 'c': 3 }, { 'd': 4, 'e': 5 }] }
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.21/lodash.min.js"></script>
The problem is non-trivial when it comes to host objects or any kind of object that's more complex than a bag of values:
do you invoke a getter to obtain a value or do you copy over the property descriptor?
what if the merge target has a setter (either own property or in its prototype chain)? Do you consider the value as already-present or call the setter to update the current value?
do you invoke own-property functions or copy them over? What if they're bound functions or arrow functions depending on something in their scope chain at the time they were defined?
what if it's something like a DOM node? You certainly don't want to treat it as a simple object and just deep-merge all its properties over into the target
how to deal with "simple" structures like arrays or maps or sets? Consider them already-present or merge them too?
how to deal with non-enumerable own properties?
what about new subtrees? Simply assign by reference or deep clone?
how to deal with frozen/sealed/non-extensible objects?
Another thing to keep in mind: Object graphs that contain cycles. It's usually not difficult to deal with - simply keep a Set of already-visited source objects - but often forgotten.
You probably should write a deep-merge function that only expects primitive values and simple objects - at most those types that the structured clone algorithm can handle - as merge sources. Throw if it encounters anything it cannot handle or just assign by reference instead of deep merging.
In other words, there is no one-size-fits-all algorithm, you either have to roll your own or look for a library method that happens to cover your use-cases.
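For illustration, a merge that deliberately rejects anything beyond plain objects and primitive values, which is one possible reading of the advice above, might look like this (my sketch; mergePlain and isPlain are made-up names):

function mergePlain(target, source) {
  // "plain" here means a direct Object literal or an Object.create(null) object
  const isPlain = v => v !== null && typeof v === 'object' &&
    (Object.getPrototypeOf(v) === Object.prototype || Object.getPrototypeOf(v) === null);

  if (!isPlain(target) || !isPlain(source)) {
    throw new TypeError('mergePlain only accepts plain objects');
  }
  for (const key of Object.keys(source)) {
    const value = source[key];
    if (isPlain(value)) {
      if (!isPlain(target[key])) target[key] = {};
      mergePlain(target[key], value);
    } else if (value !== null && typeof value === 'object') {
      // arrays, dates, DOM nodes, maps, ... : refuse rather than guess
      throw new TypeError('Refusing to merge non-plain value at key "' + key + '"');
    } else {
      target[key] = value; // primitives are assigned directly
    }
  }
  return target;
}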
Here is an immutable (does not modify the inputs) version of @Salakar's answer. Useful if you're doing functional programming type stuff.
export function isObject(item) {
  return (item && typeof item === 'object' && !Array.isArray(item));
}

export default function mergeDeep(target, source) {
  let output = Object.assign({}, target);
  if (isObject(target) && isObject(source)) {
    Object.keys(source).forEach(key => {
      if (isObject(source[key])) {
        if (!(key in target))
          Object.assign(output, { [key]: source[key] });
        else
          output[key] = mergeDeep(target[key], source[key]);
      } else {
        Object.assign(output, { [key]: source[key] });
      }
    });
  }
  return output;
}
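A quick usage example (my addition, not part of the original answer) showing that the inputs stay untouched:

const a = { x: { y: 1 } };
const b = { x: { z: 2 }, w: 3 };
const c = mergeDeep(a, b);

console.log(c); // { x: { y: 1, z: 2 }, w: 3 }
console.log(a); // { x: { y: 1 } } -- unchanged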
Update 2022:
I created mergician to address the various merge/clone requirements discussed in the comments. It is based on the same concept as my original answer (below) but offers configurable options:
Unlike native methods and other merge/clone utilities, Mergician provides advanced options for customizing the merge/clone process. These options make it easy to inspect, filter, and modify keys and properties; merge or skip unique, common, and universal keys (i.e., intersections, unions, and differences); and merge, sort, and remove duplicates from arrays. Property accessors and descriptors are also handled properly, ensuring that getter/setter functions are retained and descriptor values are defined on new merged/cloned objects.
Notably, mergician is significantly smaller (1.5k min+gzip) than similar utilities like lodash.merge (5.1k min+gzip).
GitHub: https://github.com/jhildenbiddle/mergician
NPM: https://www.npmjs.com/package/mergician
Docs: https://jhildenbiddle.github.io/mergician/
Original answer:
Since this issue is still active, here's another approach:
ES6/2015
Immutable (does not modify original objects)
Handles arrays (concatenates them)
/**
 * Performs a deep merge of objects and returns new object. Does not modify
 * objects (immutable) and merges arrays via concatenation.
 *
 * @param {...object} objects - Objects to merge
 * @returns {object} New object with merged key/values
 */
function mergeDeep(...objects) {
  const isObject = obj => obj && typeof obj === 'object';

  return objects.reduce((prev, obj) => {
    Object.keys(obj).forEach(key => {
      const pVal = prev[key];
      const oVal = obj[key];

      if (Array.isArray(pVal) && Array.isArray(oVal)) {
        prev[key] = pVal.concat(...oVal);
      }
      else if (isObject(pVal) && isObject(oVal)) {
        prev[key] = mergeDeep(pVal, oVal);
      }
      else {
        prev[key] = oVal;
      }
    });

    return prev;
  }, {});
}
// Test objects
const obj1 = {
  a: 1,
  b: 1,
  c: { x: 1, y: 1 },
  d: [ 1, 1 ]
}

const obj2 = {
  b: 2,
  c: { y: 2, z: 2 },
  d: [ 2, 2 ],
  e: 2
}

const obj3 = mergeDeep(obj1, obj2);

// Out
console.log(obj3);
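For reference (my note, not part of the answer), the logged result for the test objects above should be:

// {
//   a: 1,
//   b: 2,
//   c: { x: 1, y: 2, z: 2 },
//   d: [ 1, 1, 2, 2 ],
//   e: 2
// }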
I know there are a lot of answers already, and as many comments arguing they won't work. The only consensus is that the problem is so complicated that nobody has made a standard for it. However, most of the accepted answers on SO expose "simple tricks" that are widely used. So, for all of us who, like me, are not experts but want to write safer code by grasping a little more of JavaScript's complexity, I'll try to shed some light.
Before getting our hands dirty, let me clarify 2 points:
[DISCLAIMER] I propose a function below that tackles how we loop deep into JavaScript objects for copying, and that illustrates what is generally commented on too briefly. It is not production-ready. For the sake of clarity, I have purposely left aside other considerations like circular objects (track them with a set or a non-conflicting symbol property), copying the reference value vs. deep cloning, an immutable destination object (deep clone again?), a case-by-case study of each type of object, getting/setting properties via accessors... Also, I did not test performance, although it's important, because that's not the point here either.
I'll use the terms copy or assign instead of merge, because in my mind a merge is conservative and should fail upon conflicts. Here, when conflicting, we want the source to overwrite the destination, like Object.assign does.
Answers with for..in or Object.keys are misleading
Making a deep copy seems so basic and common practice that we expect to find a one-liner or, at least, a quick win via simple recursion. We don't expect we should need a library or write a custom function of 100 lines.
When I first read Salakar's answer, I genuinely thought I could do better and simpler (you can compare it with Object.assign on x={a:1}, y={a:{b:1}}). Then I read the8472's answer and I thought... there is no getting away so easily, improving already given answers won't get us far.
Let's leave deep copy and recursion aside for a moment. Just consider how (wrongly) people parse properties to copy a very simple object.
const y = Object.create(
{ proto : 1 },
{ a: { enumerable: true, value: 1},
[Symbol('b')] : { enumerable: true, value: 1} } )
Object.assign({},y)
> { 'a': 1, Symbol(b): 1 } // All (enumerable) properties are copied
((x,y) => Object.keys(y).reduce((acc,k) => Object.assign(acc, { [k]: y[k] }), x))({},y)
> { 'a': 1 } // Missing a property!
((x,y) => {for (let k in y) x[k]=y[k];return x})({},y)
> { 'a': 1, 'proto': 1 } // Missing a property! Prototype's property is copied too!
Object.keys will omit own non-enumerable properties, own symbol-keyed properties and all prototype properties. That may be fine if your objects don't have any of those. But keep in mind that Object.assign handles own symbol-keyed enumerable properties. So your custom copy lost its bloom.
for..in will provide properties of the source, of its prototype and of the full prototype chain without you wanting it (or knowing it). Your target may end up with too many properties, mixing up prototype properties and own properties.
If you're writing a general purpose function and you're not using Object.getOwnPropertyDescriptors, Object.getOwnPropertyNames, Object.getOwnPropertySymbols or Object.getPrototypeOf, you're most probably doing it wrong.
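To illustrate that point (my own example, echoing the object built earlier), Reflect.ownKeys and Object.getOwnPropertyDescriptors see what Object.keys and for..in miss:

const y = Object.create(
  { proto: 1 },
  {
    a: { enumerable: true, value: 1 },
    hidden: { enumerable: false, value: 2 },
    [Symbol('b')]: { enumerable: true, value: 1 }
  }
);

console.log(Object.keys(y));     // [ 'a' ]
console.log(Reflect.ownKeys(y)); // [ 'a', 'hidden', Symbol(b) ]
console.log(Object.getOwnPropertyDescriptors(y).hidden);
// { value: 2, writable: false, enumerable: false, configurable: false }
console.log(Object.getPrototypeOf(y)); // { proto: 1 }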
Things to consider before writing your function
First, make sure you understand what a Javascript object is. In Javascript, an object is made of its own properties and a (parent) prototype object. The prototype object in turn is made of its own properties and a prototype object. And so on, defining a prototype chain.
A property is a pair of key (string or symbol) and descriptor (value or get/set accessor, and attributes like enumerable).
Finally, there are many types of objects. You may want to handle differently an object Object from an object Date or an object Function.
So, writing your deep copy, you should answer at least those questions:
What do I consider deep (proper for recursive look up) or flat?
What properties do I want to copy? (enumerable/non-enumerable, string-keyed/symbol-keyed, own properties/prototype's own properties, values/descriptors...)
For my example, I consider that only the object Objects are deep, because other objects created by other constructors may not be proper for an in-depth look. Customized from this SO.
function toType(a) {
  // Get fine type (object, array, function, null, error, date ...)
  return ({}).toString.call(a).match(/([a-z]+)(:?\])/i)[1];
}

function isDeepObject(obj) {
  return "Object" === toType(obj);
}
And I made an options object to choose what to copy (for demo purpose).
const options = {nonEnum:true, symbols:true, descriptors: true, proto:true};
Proposed function
You can test it in this plunker.
function deepAssign(options) {
  return function deepAssignWithOptions(target, ...sources) {
    sources.forEach((source) => {
      if (!isDeepObject(source) || !isDeepObject(target))
        return;

      // Copy source's own properties into target's own properties
      function copyProperty(property) {
        const descriptor = Object.getOwnPropertyDescriptor(source, property);
        // default: omit non-enumerable properties
        if (descriptor.enumerable || options.nonEnum) {
          // Copy in-depth first
          if (isDeepObject(source[property]) && isDeepObject(target[property]))
            descriptor.value = deepAssign(options)(target[property], source[property]);
          // default: omit descriptors
          if (options.descriptors)
            Object.defineProperty(target, property, descriptor); // shallow copy descriptor
          else
            target[property] = descriptor.value; // shallow copy value only
        }
      }

      // Copy string-keyed properties
      Object.getOwnPropertyNames(source).forEach(copyProperty);

      // default: omit symbol-keyed properties
      if (options.symbols)
        Object.getOwnPropertySymbols(source).forEach(copyProperty);

      // default: omit prototype's own properties
      if (options.proto)
        // Copy source prototype's own properties into target prototype's own properties
        deepAssign(Object.assign({}, options, { proto: false }))( // Prevent deeper copy of the prototype chain
          Object.getPrototypeOf(target),
          Object.getPrototypeOf(source)
        );
    });
    return target;
  }
}
That can be used like this:
const x = { a: { a: 1 } },
y = { a: { b: 1 } };
deepAssign(options)(x,y); // { a: { a: 1, b: 1 } }
If you want a one-liner without requiring a huge library like lodash, I suggest you use deepmerge (npm install deepmerge) or deepmerge-ts (npm install deepmerge-ts).
deepmerge also comes with typings for TypeScript and is more stable (since it's older), while deepmerge-ts is also available for Deno and is faster by design, although it is written in TypeScript, as the name implies.
Once imported you can do
deepmerge({ a: 1, b: 2, c: 3 }, { a: 2, d: 3 });
to get
{ a: 2, b: 2, c: 3, d: 3 }
This works nicely with complex objects and arrays. A real all-rounder solution this is.
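For instance, deepmerge concatenates arrays by default, and (if I remember the option name correctly) its arrayMerge option lets you change that; the values below are my own example:

import deepmerge from 'deepmerge';

deepmerge({ a: [1], b: { c: 1 } }, { a: [2], b: { d: 2 } });
// -> { a: [1, 2], b: { c: 1, d: 2 } }

// to overwrite arrays instead of concatenating them:
deepmerge({ a: [1] }, { a: [2] }, { arrayMerge: (dest, src) => src });
// -> { a: [2] }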
I use lodash:
import _ = require('lodash');
value = _.merge(value1, value2);
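If the default array handling of _.merge is not what you want, _.mergeWith lets you plug in a customizer; a small sketch (my addition) that concatenates arrays instead:

const _ = require('lodash');

const merged = _.mergeWith(
  { a: [1, 2] },
  { a: [3] },
  (objValue, srcValue) => {
    // returning undefined falls back to the default merge behaviour
    if (_.isArray(objValue)) return objValue.concat(srcValue);
  }
);
console.log(merged); // { a: [ 1, 2, 3 ] }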
Many answers use tens of lines of code, or require adding a new library to the project, but if you use recursion, this is just 4 lines of code.
function merge(current, updates) {
  for (const key of Object.keys(updates)) {
    if (!current.hasOwnProperty(key) || typeof updates[key] !== 'object') current[key] = updates[key];
    else merge(current[key], updates[key]);
  }
  return current;
}
console.log(merge({ a: { a: 1 } }, { a: { b: 1 } }));
Arrays handling: the version above overwrites old array values with new ones. If you want it to keep the old array values and append the new ones, just add an else if (current[key] instanceof Array && updates[key] instanceof Array) current[key] = current[key].concat(updates[key]) branch above the else statement (as sketched below) and you're all set.
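Spelled out, that array-concatenating variant would look roughly like this (my rendering of the description above):

function merge(current, updates) {
  for (const key of Object.keys(updates)) {
    if (!current.hasOwnProperty(key) || typeof updates[key] !== 'object') {
      current[key] = updates[key];
    } else if (current[key] instanceof Array && updates[key] instanceof Array) {
      current[key] = current[key].concat(updates[key]); // keep old values, append new ones
    } else {
      merge(current[key], updates[key]);
    }
  }
  return current;
}

console.log(merge({ a: [1], b: { c: 1 } }, { a: [2], b: { d: 2 } }));
// { a: [ 1, 2 ], b: { c: 1, d: 2 } }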
Here is a straightforward, simple solution that works like Object.assign, just deep, and works for arrays without any modification.
function deepAssign(target, ...sources) {
  for (const source of sources) {
    for (let k in source) {
      let vs = source[k], vt = target[k]
      if (Object(vs) == vs && Object(vt) === vt) {
        target[k] = deepAssign(vt, vs)
        continue
      }
      target[k] = source[k]
    }
  }
  return target
}
x = { a: { a: 1 }, b: [1,2] }
y = { a: { b: 1 }, b: [3] }
z = { c: 3, b: [,,,4] }
x = deepAssign(x, y, z)
console.log(JSON.stringify(x) === JSON.stringify({
"a": {
"a": 1,
"b": 1
},
"b": [ 1, 2, null, 4 ],
"c": 3
}))
Edit:
I answered elsewhere with a new method for deeply comparing two objects.
That method can also be used for deep merging. If you want an implementation, leave a comment.
https://stackoverflow.com/a/71177790/1919821
Here is a TypeScript implementation:
export const mergeObjects = <T extends object = object>(target: T, ...sources: T[]): T => {
  if (!sources.length) {
    return target;
  }
  const source = sources.shift();
  if (source === undefined) {
    return target;
  }

  if (isMergebleObject(target) && isMergebleObject(source)) {
    Object.keys(source).forEach(function(key: string) {
      if (isMergebleObject(source[key])) {
        if (!target[key]) {
          target[key] = {};
        }
        mergeObjects(target[key], source[key]);
      } else {
        target[key] = source[key];
      }
    });
  }

  return mergeObjects(target, ...sources);
};

const isObject = (item: any): boolean => {
  return item !== null && typeof item === 'object';
};

const isMergebleObject = (item): boolean => {
  return isObject(item) && !Array.isArray(item);
};
And Unit Tests:
describe('merge', () => {
it('should merge Objects and all nested Ones', () => {
const obj1 = { a: { a1: 'A1'}, c: 'C', d: {} };
const obj2 = { a: { a2: 'A2'}, b: { b1: 'B1'}, d: null };
const obj3 = { a: { a1: 'A1', a2: 'A2'}, b: { b1: 'B1'}, c: 'C', d: null};
expect(mergeObjects({}, obj1, obj2)).toEqual(obj3);
});
it('should behave like Object.assign on the top level', () => {
const obj1 = { a: { a1: 'A1'}, c: 'C'};
const obj2 = { a: undefined, b: { b1: 'B1'}};
expect(mergeObjects({}, obj1, obj2)).toEqual(Object.assign({}, obj1, obj2));
});
it('should not merge array values, just override', () => {
const obj1 = {a: ['A', 'B']};
const obj2 = {a: ['C'], b: ['D']};
expect(mergeObjects({}, obj1, obj2)).toEqual({a: ['C'], b: ['D']});
});
it('typed merge', () => {
expect(mergeObjects<TestPosition>(new TestPosition(0, 0), new TestPosition(1, 1)))
.toEqual(new TestPosition(1, 1));
});
});
class TestPosition {
constructor(public x: number = 0, public y: number = 0) {/*empty*/}
}
The deepmerge npm package appears to be the most widely used library for solving this problem:
https://www.npmjs.com/package/deepmerge
I would like to present a pretty simple ES5 alternative. The function takes two parameters, target and source, which must be of type "object". The target will be the resulting object. The target keeps all of its original properties, but their values may be modified.
function deepMerge(target, source) {
  if (typeof target !== 'object' || typeof source !== 'object') return false; // target or source or both ain't objects, merging doesn't make sense
  for (var prop in source) {
    if (!source.hasOwnProperty(prop)) continue; // take into consideration only object's own properties.
    if (prop in target) { // handling merging of two properties with equal names
      if (typeof target[prop] !== 'object') {
        target[prop] = source[prop];
      } else {
        if (typeof source[prop] !== 'object') {
          target[prop] = source[prop];
        } else {
          if (target[prop].concat && source[prop].concat) { // two arrays get concatenated
            target[prop] = target[prop].concat(source[prop]);
          } else { // two objects get merged recursively
            target[prop] = deepMerge(target[prop], source[prop]);
          }
        }
      }
    } else { // new properties get added to target
      target[prop] = source[prop];
    }
  }
  return target;
}
Cases:
if target doesn't have a source property, target gets it;
if target does have a source property and target & source are not both objects (3 cases out of 4), target's property gets overridden;
if target does have a source property and both of them are objects/arrays (1 remaining case), then recursion happens, merging the two objects (or concatenating the two arrays);
also consider the following:
array + obj = array
obj + array = obj
obj + obj = obj (recursively merged)
array + array = array (concat)
It is predictable and supports primitive types as well as arrays and objects. Also, since we can merge two objects, I think we can merge more than two via a reduce function (see the sketch below).
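For example (my addition, folding deepMerge over an array of objects; like deepMerge itself, this mutates the first object in the list):

var merged = [
  { a: 1, nested: { x: 1 } },
  { b: 2, nested: { y: 2 } },
  { c: 3 }
].reduce(deepMerge);

console.log(merged); // { a: 1, nested: { x: 1, y: 2 }, b: 2, c: 3 }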
take a look at an example (and play around with it if you want):
var a = {
"a_prop": 1,
"arr_prop": [4, 5, 6],
"obj": {
"a_prop": {
"t_prop": 'test'
},
"b_prop": 2
}
};
var b = {
"a_prop": 5,
"arr_prop": [7, 8, 9],
"b_prop": 15,
"obj": {
"a_prop": {
"u_prop": false
},
"b_prop": {
"s_prop": null
}
}
};
function deepMerge(target, source) {
if(typeof target !== 'object' || typeof source !== 'object') return false;
for(var prop in source) {
if(!source.hasOwnProperty(prop)) continue;
if(prop in target) {
if(typeof target[prop] !== 'object') {
target[prop] = source[prop];
} else {
if(typeof source[prop] !== 'object') {
target[prop] = source[prop];
} else {
if(target[prop].concat && source[prop].concat) {
target[prop] = target[prop].concat(source[prop]);
} else {
target[prop] = deepMerge(target[prop], source[prop]);
}
}
}
} else {
target[prop] = source[prop];
}
}
return target;
}
console.log(deepMerge(a, b));
There is a limitation - browser's call stack length. Modern browsers will throw an error at some really deep level of recursion (think of thousands of nested calls). Also you are free to treat situations like array + object etc. as you wish by adding new conditions and type checks.
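If recursion depth ever became a real concern, the same merge can be driven by an explicit work stack instead; this is a sketch of the idea (my addition), not a drop-in replacement for the function above:

function deepMergeIterative(target, source) {
  var isObj = function (v) { return v !== null && typeof v === 'object' && !Array.isArray(v); };
  var stack = [{ target: target, source: source }];
  while (stack.length) {
    var frame = stack.pop();
    for (var prop in frame.source) {
      if (!frame.source.hasOwnProperty(prop)) continue;
      var s = frame.source[prop], t = frame.target[prop];
      if (isObj(t) && isObj(s)) {
        stack.push({ target: t, source: s });  // defer nested objects to the work stack
      } else if (Array.isArray(t) && Array.isArray(s)) {
        frame.target[prop] = t.concat(s);      // keep the array-concat behaviour
      } else {
        frame.target[prop] = s;
      }
    }
  }
  return target;
}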
A simple solution with ES5 (overwrite existing value):
function merge(current, update) {
Object.keys(update).forEach(function(key) {
// if update[key] exist, and it's not a string or array,
// we go in one level deeper
if (current.hasOwnProperty(key)
&& typeof current[key] === 'object'
&& !(current[key] instanceof Array)) {
merge(current[key], update[key]);
// if update[key] doesn't exist in current, or it's a string
// or array, then assign/overwrite current[key] to update[key]
} else {
current[key] = update[key];
}
});
return current;
}
var x = { a: { a: 1 } }
var y = { a: { b: 1 } }
console.log(merge(x, y));
Is there a way to do this?
If npm libraries can be used as a solution, object-merge-advanced from yours truly allows you to merge objects deeply and customise/override every single merge action using a familiar callback function. The main idea of it is more than just deep merging: what happens with the value when two keys are the same? This library takes care of that: when two keys clash, object-merge-advanced weighs the types, aiming to retain as much data as possible after merging:
First input argument's key is marked #1, second argument's — #2. Depending on each type, one is chosen for the result key's value. In diagram, "an object" means a plain object (not array etc).
When keys don't clash, they all enter the result.
From your example snippet, if you used object-merge-advanced to merge your code snippet:
const mergeObj = require("object-merge-advanced");
const x = { a: { a: 1 } };
const y = { a: { b: 1 } };
const res = console.log(mergeObj(x, y));
// => res = {
// a: {
// a: 1,
// b: 1
// }
// }
Its algorithm recursively traverses all input object keys, compares them, and builds and returns the new merged result.
The following function makes a deep copy of objects; it covers copying primitives, arrays, as well as objects.
function mergeDeep(target, source) {
  if (typeof target == "object" && typeof source == "object") {
    for (const key in source) {
      if (source[key] === null && (target[key] === undefined || target[key] === null)) {
        target[key] = null;
      } else if (source[key] instanceof Array) {
        if (!target[key]) target[key] = [];
        // concatenate arrays
        target[key] = target[key].concat(source[key]);
      } else if (typeof source[key] == "object") {
        if (!target[key]) target[key] = {};
        mergeDeep(target[key], source[key]);
      } else {
        target[key] = source[key];
      }
    }
  }
  return target;
}
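A quick usage example (my addition, not part of the original answer):

const target = { a: 1, list: [1], nested: { x: 1 } };
mergeDeep(target, { list: [2], nested: { y: 2 }, b: null });

console.log(target);
// { a: 1, list: [ 1, 2 ], nested: { x: 1, y: 2 }, b: null }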
Most examples here seem too complex. I'm using one I created in TypeScript; I think it should cover most cases (I'm handling arrays as regular data, just replacing them).
const isObject = (item: any) => typeof item === 'object' && !Array.isArray(item);
export const merge = <A = Object, B = Object>(target: A, source: B): A & B => {
const isDeep = (prop: string) =>
isObject(source[prop]) && target.hasOwnProperty(prop) && isObject(target[prop]);
const replaced = Object.getOwnPropertyNames(source)
.map(prop => ({ [prop]: isDeep(prop) ? merge(target[prop], source[prop]) : source[prop] }))
.reduce((a, b) => ({ ...a, ...b }), {});
return {
...(target as Object),
...(replaced as Object)
} as A & B;
};
Same thing in plain JS, just in case:
const isObject = item => typeof item === 'object' && !Array.isArray(item);
const merge = (target, source) => {
const isDeep = prop =>
isObject(source[prop]) && target.hasOwnProperty(prop) && isObject(target[prop]);
const replaced = Object.getOwnPropertyNames(source)
.map(prop => ({ [prop]: isDeep(prop) ? merge(target[prop], source[prop]) : source[prop] }))
.reduce((a, b) => ({ ...a, ...b }), {});
return {
...target,
...replaced
};
};
Here are my test cases to show how you could use it
describe('merge', () => {
context('shallow merges', () => {
it('merges objects', () => {
const a = { a: 'discard' };
const b = { a: 'test' };
expect(merge(a, b)).to.deep.equal({ a: 'test' });
});
it('extends objects', () => {
const a = { a: 'test' };
const b = { b: 'test' };
expect(merge(a, b)).to.deep.equal({ a: 'test', b: 'test' });
});
it('extends a property with an object', () => {
const a = { a: 'test' };
const b = { b: { c: 'test' } };
expect(merge(a, b)).to.deep.equal({ a: 'test', b: { c: 'test' } });
});
it('replaces a property with an object', () => {
const a = { b: 'whatever', a: 'test' };
const b = { b: { c: 'test' } };
expect(merge(a, b)).to.deep.equal({ a: 'test', b: { c: 'test' } });
});
});
context('deep merges', () => {
it('merges objects', () => {
const a = { test: { a: 'discard', b: 'test' } };
const b = { test: { a: 'test' } } ;
expect(merge(a, b)).to.deep.equal({ test: { a: 'test', b: 'test' } });
});
it('extends objects', () => {
const a = { test: { a: 'test' } };
const b = { test: { b: 'test' } };
expect(merge(a, b)).to.deep.equal({ test: { a: 'test', b: 'test' } });
});
it('extends a property with an object', () => {
const a = { test: { a: 'test' } };
const b = { test: { b: { c: 'test' } } };
expect(merge(a, b)).to.deep.equal({ test: { a: 'test', b: { c: 'test' } } });
});
it('replaces a property with an object', () => {
const a = { test: { b: 'whatever', a: 'test' } };
const b = { test: { b: { c: 'test' } } };
expect(merge(a, b)).to.deep.equal({ test: { a: 'test', b: { c: 'test' } } });
});
});
});
Please let me know if you think I'm missing some functionality.
We can use $.extend(true, object1, object2) for deep merging. The value true denotes merging the two objects recursively, modifying the first.
$.extend(true, target, object)
If you are using ImmutableJS you can use mergeDeep:
fromJS(options).mergeDeep(options2).toJS();
Ramda, which is a nice library of JavaScript functions, has mergeDeepLeft and mergeDeepRight. Either of these works pretty well for this problem. Please take a look at the documentation here: https://ramdajs.com/docs/#mergeDeepLeft
For the specific example in question we can use:
import { mergeDeepLeft } from 'ramda'
const x = { a: { a: 1 } }
const y = { a: { b: 1 } }
const z = mergeDeepLeft(x, y) // {"a":{"a":1,"b":1}}
with reduce
export const merge = (objFrom, objTo) => Object.keys(objFrom)
  .reduce(
    (merged, key) => {
      merged[key] = objFrom[key] instanceof Object && !Array.isArray(objFrom[key])
        ? merge(objFrom[key], merged[key] ?? {})
        : objFrom[key]
      return merged
    }, { ...objTo }
  )

test('merge', async () => {
  const obj1 = { par1: -1, par2: { par2_1: -21, par2_5: -25 }, arr: [0,1,2] }
  const obj2 = { par1: 1, par2: { par2_1: 21 }, par3: 3, arr: [3,4,5] }
  const obj3 = merge(obj1, obj2)
  expect(obj3).toEqual(
    { par1: -1, par2: { par2_1: -21, par2_5: -25 }, par3: 3, arr: [0,1,2] }
  )
})
I was having this issue when loading a cached redux state. If I just loaded the cached state, I'd run into errors for a new app version with an updated state structure.
It was already mentioned that lodash offers the merge function, which I used:
const currentInitialState = configureState().getState();
const mergedState = _.merge({}, currentInitialState, cachedState);
const store = configureState(mergedState);
I didn't like any of the existing solutions. So, I went ahead and wrote my own.
Object.prototype.merge = function(object) {
  for (const key in object) {
    if (object.hasOwnProperty(key)) {
      if (typeof this[key] === "object" && typeof object[key] === "object") {
        this[key].merge(object[key]);
        continue;
      }
      this[key] = object[key];
    }
  }
  return this;
}
I hope this helps those of you who struggle to understand what's going on. I've seen a lot of meaningless variables being used on here.
Thanks
Here's another one I just wrote that supports arrays. It concats them.
function isObject(obj) {
return obj !== null && typeof obj === 'object';
}
function isPlainObject(obj) {
return isObject(obj) && (
obj.constructor === Object // obj = {}
|| obj.constructor === undefined // obj = Object.create(null)
);
}
function mergeDeep(target, ...sources) {
if (!sources.length) return target;
const source = sources.shift();
if(Array.isArray(target)) {
if(Array.isArray(source)) {
target.push(...source);
} else {
target.push(source);
}
} else if(isPlainObject(target)) {
if(isPlainObject(source)) {
for(let key of Object.keys(source)) {
if(!target[key]) {
target[key] = source[key];
} else {
mergeDeep(target[key], source[key]);
}
}
} else {
throw new Error(`Cannot merge object with non-object`);
}
} else {
target = source;
}
return mergeDeep(target, ...sources);
};
Use this function:
function merge(target, source, mutable = false) {
  const newObj = typeof target == 'object' ? (mutable ? target : Object.assign({}, target)) : {};
  for (const prop in source) {
    if (target[prop] == null || typeof target[prop] === 'undefined') {
      newObj[prop] = source[prop];
    } else if (Array.isArray(target[prop])) {
      newObj[prop] = source[prop] || target[prop];
    } else if (target[prop] instanceof RegExp) {
      newObj[prop] = source[prop] || target[prop];
    } else {
      newObj[prop] = typeof source[prop] === 'object' ? merge(target[prop], source[prop]) : source[prop];
    }
  }
  return newObj;
}
This is a cheap deep merge that uses as little code as I could think of. Each source overwrites the previous property when it exists.
const { keys } = Object;

const isObject = a => typeof a === "object" && !Array.isArray(a);
const merge = (a, b) =>
  isObject(a) && isObject(b)
    ? deepMerge(a, b)
    : isObject(a) && !isObject(b)
      ? a
      : b;

const coalesceByKey = source => (acc, key) =>
  (acc[key] && source[key]
    ? (acc[key] = merge(acc[key], source[key]))
    : (acc[key] = source[key])) && acc;

/**
 * Merge all sources into the target
 * overwriting primitive values in the accumulated target as we go (if they already exist)
 * @param {*} target
 * @param {...any} sources
 */
const deepMerge = (target, ...sources) =>
  sources.reduce(
    (acc, source) => keys(source).reduce(coalesceByKey(source), acc),
    target
  );
console.log(deepMerge({ a: 1 }, { a: 2 }));
console.log(deepMerge({ a: 1 }, { a: { b: 2 } }));
console.log(deepMerge({ a: { b: 2 } }, { a: 1 }));
// copies all properties from source object to dest object recursively
export function recursivelyMoveProperties(source, dest) {
for (const prop in source) {
if (!source.hasOwnProperty(prop)) {
continue;
}
if (source[prop] === null) {
// property is null
dest[prop] = source[prop];
continue;
}
if (typeof source[prop] === 'object') {
// if the property is an object, let's dive into it
if (Array.isArray(source[prop])) {
dest[prop] = [];
} else {
if (!dest.hasOwnProperty(prop)
|| typeof dest[prop] !== 'object'
|| dest[prop] === null || Array.isArray(dest[prop])
|| !Object.keys(dest[prop]).length) {
dest[prop] = {};
}
}
recursivelyMoveProperties(source[prop], dest[prop]);
continue;
}
// property is simple type: string, number, e.t.c
dest[prop] = source[prop];
}
return dest;
}
Unit test:
describe('recursivelyMoveProperties', () => {
it('should copy properties correctly', () => {
const source: any = {
propS1: 'str1',
propS2: 'str2',
propN1: 1,
propN2: 2,
propA1: [1, 2, 3],
propA2: [],
propB1: true,
propB2: false,
propU1: null,
propU2: null,
propD1: undefined,
propD2: undefined,
propO1: {
subS1: 'sub11',
subS2: 'sub12',
subN1: 11,
subN2: 12,
subA1: [11, 12, 13],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
propO2: {
subS1: 'sub21',
subS2: 'sub22',
subN1: 21,
subN2: 22,
subA1: [21, 22, 23],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
};
let dest: any = {
propS2: 'str2',
propS3: 'str3',
propN2: -2,
propN3: 3,
propA2: [2, 2],
propA3: [3, 2, 1],
propB2: true,
propB3: false,
propU2: 'not null',
propU3: null,
propD2: 'defined',
propD3: undefined,
propO2: {
subS2: 'inv22',
subS3: 'sub23',
subN2: -22,
subN3: 23,
subA2: [5, 5, 5],
subA3: [31, 32, 33],
subB2: false,
subB3: true,
subU2: 'not null --- ',
subU3: null,
subD2: ' not undefined ----',
subD3: undefined,
},
propO3: {
subS1: 'sub31',
subS2: 'sub32',
subN1: 31,
subN2: 32,
subA1: [31, 32, 33],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
};
dest = recursivelyMoveProperties(source, dest);
expect(dest).toEqual({
propS1: 'str1',
propS2: 'str2',
propS3: 'str3',
propN1: 1,
propN2: 2,
propN3: 3,
propA1: [1, 2, 3],
propA2: [],
propA3: [3, 2, 1],
propB1: true,
propB2: false,
propB3: false,
propU1: null,
propU2: null,
propU3: null,
propD1: undefined,
propD2: undefined,
propD3: undefined,
propO1: {
subS1: 'sub11',
subS2: 'sub12',
subN1: 11,
subN2: 12,
subA1: [11, 12, 13],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
propO2: {
subS1: 'sub21',
subS2: 'sub22',
subS3: 'sub23',
subN1: 21,
subN2: 22,
subN3: 23,
subA1: [21, 22, 23],
subA2: [],
subA3: [31, 32, 33],
subB1: false,
subB2: true,
subB3: true,
subU1: null,
subU2: null,
subU3: null,
subD1: undefined,
subD2: undefined,
subD3: undefined,
},
propO3: {
subS1: 'sub31',
subS2: 'sub32',
subN1: 31,
subN2: 32,
subA1: [31, 32, 33],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
});
});
});
Another variation using recursion, hope you find it useful.
const merge = (obj1, obj2) => {
  const recursiveMerge = (obj, entries) => {
    for (const [key, value] of entries) {
      if (typeof value === "object") {
        obj[key] = obj[key] ? { ...obj[key] } : {};
        recursiveMerge(obj[key], Object.entries(value));
      } else {
        obj[key] = value;
      }
    }
    return obj;
  };
  return recursiveMerge(obj1, Object.entries(obj2));
};
My use case for this was to merge default values into a configuration. If my component accepts a configuration object that has a deeply nested structure, and my component defines a default configuration, I wanted to set default values in my configuration for all configuration options that were not supplied.
Example usage:
export default MyComponent = ({config}) => {
const mergedConfig = mergeDefaults(config, {header:{margins:{left:10, top: 10}}});
// Component code here
}
This allows me to pass an empty or null config, or a partial config and have all of the values that are not configured fall back to their default values.
My implementation of mergeDefaults looks like this:
export default function mergeDefaults(config, defaults) {
  if (config === null || config === undefined) return defaults;
  for (var attrname in defaults) {
    if (defaults[attrname].constructor === Object) config[attrname] = mergeDefaults(config[attrname], defaults[attrname]);
    else if (config[attrname] === undefined) config[attrname] = defaults[attrname];
  }
  return config;
}
And these are my unit tests
import '@testing-library/jest-dom/extend-expect';
import mergeDefaults from './mergeDefaults';
describe('mergeDefaults', () => {
it('should create configuration', () => {
const config = mergeDefaults(null, { a: 10, b: { c: 'default1', d: 'default2' } });
expect(config.a).toStrictEqual(10);
expect(config.b.c).toStrictEqual('default1');
expect(config.b.d).toStrictEqual('default2');
});
it('should fill configuration', () => {
const config = mergeDefaults({}, { a: 10, b: { c: 'default1', d: 'default2' } });
expect(config.a).toStrictEqual(10);
expect(config.b.c).toStrictEqual('default1');
expect(config.b.d).toStrictEqual('default2');
});
it('should not overwrite configuration', () => {
const config = mergeDefaults({ a: 12, b: { c: 'config1', d: 'config2' } }, { a: 10, b: { c: 'default1', d: 'default2' } });
expect(config.a).toStrictEqual(12);
expect(config.b.c).toStrictEqual('config1');
expect(config.b.d).toStrictEqual('config2');
});
it('should merge configuration', () => {
const config = mergeDefaults({ a: 12, b: { d: 'config2' } }, { a: 10, b: { c: 'default1', d: 'default2' }, e: 15 });
expect(config.a).toStrictEqual(12);
expect(config.b.c).toStrictEqual('default1');
expect(config.b.d).toStrictEqual('config2');
expect(config.e).toStrictEqual(15);
});
});
Sometimes you don't need deep merge, even if you think so. For example, if you have a default config with nested objects and you want to extend it deeply with your own config, you can create a class for that. The concept is very simple:
function AjaxConfig(config) {
// Default values + config
Object.assign(this, {
method: 'POST',
contentType: 'text/plain'
}, config);
// Default values in nested objects
this.headers = Object.assign({}, this.headers, {
'X-Requested-With': 'custom'
});
}
// Define your config
var config = {
url: 'https://google.com',
headers: {
'x-client-data': 'CI22yQEI'
}
};
// Extend the default values with your own
var fullMergedConfig = new AjaxConfig(config);
// View in DevTools
console.log(fullMergedConfig);
You can convert it to a function (not a constructor), as sketched below.
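A rough sketch of that function form (my own illustration; makeAjaxConfig is a hypothetical name):

function makeAjaxConfig(config) {
  // Default values + config
  const merged = Object.assign({
    method: 'POST',
    contentType: 'text/plain'
  }, config);

  // Default values in nested objects
  merged.headers = Object.assign({}, merged.headers, {
    'X-Requested-With': 'custom'
  });

  return merged;
}

const fullMergedConfig = makeAjaxConfig({
  url: 'https://google.com',
  headers: { 'x-client-data': 'CI22yQEI' }
});

console.log(fullMergedConfig);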

Over-ride multi-dimensional array [duplicate]

Both Object.assign and Object spread only do a shallow merge.
An example of the problem:
// No object nesting
const x = { a: 1 }
const y = { b: 1 }
const z = { ...x, ...y } // { a: 1, b: 1 }
The output is what you'd expect. However if I try this:
// Object nesting
const x = { a: { a: 1 } }
const y = { a: { b: 1 } }
const z = { ...x, ...y } // { a: { b: 1 } }
Instead of
{ a: { a: 1, b: 1 } }
you get
{ a: { b: 1 } }
x is completely overwritten because the spread syntax only goes one level deep. This is the same with Object.assign().
Is there a way to do this?
I know this is a bit of an old issue but the easiest solution in ES2015/ES6 I could come up with was actually quite simple, using Object.assign(),
Hopefully this helps:
/**
* Simple object check.
* #param item
* #returns {boolean}
*/
export function isObject(item) {
return (item && typeof item === 'object' && !Array.isArray(item));
}
/**
* Deep merge two objects.
* #param target
* #param ...sources
*/
export function mergeDeep(target, ...sources) {
if (!sources.length) return target;
const source = sources.shift();
if (isObject(target) && isObject(source)) {
for (const key in source) {
if (isObject(source[key])) {
if (!target[key]) Object.assign(target, { [key]: {} });
mergeDeep(target[key], source[key]);
} else {
Object.assign(target, { [key]: source[key] });
}
}
}
return mergeDeep(target, ...sources);
}
Example usage:
mergeDeep(this, { a: { b: { c: 123 } } });
// or
const merged = mergeDeep({a: 1}, { b : { c: { d: { e: 12345}}}});
console.dir(merged); // { a: 1, b: { c: { d: [Object] } } }
You'll find an immutable version of this in the answer below.
Note that this will lead to infinite recursion on circular references. There's some great answers on here on how to detect circular references if you think you'd face this issue.
You can use Lodash merge:
var object = {
'a': [{ 'b': 2 }, { 'd': 4 }]
};
var other = {
'a': [{ 'c': 3 }, { 'e': 5 }]
};
console.log(_.merge(object, other));
// => { 'a': [{ 'b': 2, 'c': 3 }, { 'd': 4, 'e': 5 }] }
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.21/lodash.min.js"></script>
The problem is non-trivial when it comes to host objects or any kind of object that's more complex than a bag of values
do you invoke a getter to obtain a value or do you copy over the property descriptor?
what if the merge target has a setter (either own property or in its prototype chain)? Do you consider the value as already-present or call the setter to update the current value?
do you invoke own-property functions or copy them over? What if they're bound functions or arrow functions depending on something in their scope chain at the time they were defined?
what if it's something like a DOM node? You certainly don't want to treat it as simple object and just deep-merge all its properties over into
how to deal with "simple" structures like arrays or maps or sets? Consider them already-present or merge them too?
how to deal with non-enumerable own properties?
what about new subtrees? Simply assign by reference or deep clone?
how to deal with frozen/sealed/non-extensible objects?
Another thing to keep in mind: Object graphs that contain cycles. It's usually not difficult to deal with - simply keep a Set of already-visited source objects - but often forgotten.
You probably should write a deep-merge function that only expects primitive values and simple objects - at most those types that the structured clone algorithm can handle - as merge sources. Throw if it encounters anything it cannot handle or just assign by reference instead of deep merging.
In other words, there is no one-size-fits-all algorithm, you either have to roll your own or look for a library method that happens to cover your use-cases.
Here is an immutable (does not modify the inputs) version of #Salakar's answer. Useful if you're doing functional programming type stuff.
export function isObject(item) {
return (item && typeof item === 'object' && !Array.isArray(item));
}
export default function mergeDeep(target, source) {
let output = Object.assign({}, target);
if (isObject(target) && isObject(source)) {
Object.keys(source).forEach(key => {
if (isObject(source[key])) {
if (!(key in target))
Object.assign(output, { [key]: source[key] });
else
output[key] = mergeDeep(target[key], source[key]);
} else {
Object.assign(output, { [key]: source[key] });
}
});
}
return output;
}
Update 2022:
I created mergician to address the various merge/clone requirements discussed in the comments. It is based on the same concept as my original answer (below) but offers configurable options:
Unlike native methods and other merge/clone utilities, Mergician provides advanced options for customizing the merge/clone process. These options make it easy to inspect, filter, and modify keys and properties; merge or skip unique, common, and universal keys (i.e., intersections, unions, and differences); and merge, sort, and remove duplicates from arrays. Property accessors and descriptors are also handled properly, ensuring that getter/setter functions are retained and descriptor values are defined on new merged/cloned objects.
Notably, mergician is significantly smaller (1.5k min+gzip) than similar utilities like lodash.merge (5.1k min+gzip).
GitHub: https://github.com/jhildenbiddle/mergician
NPM: https://www.npmjs.com/package/mergician
Docs: https://jhildenbiddle.github.io/mergician/
Original answer:
Since this issue is still active, here's another approach:
ES6/2015
Immutable (does not modify original objects)
Handles arrays (concatenates them)
/**
* Performs a deep merge of objects and returns new object. Does not modify
* objects (immutable) and merges arrays via concatenation.
*
* #param {...object} objects - Objects to merge
* #returns {object} New object with merged key/values
*/
function mergeDeep(...objects) {
const isObject = obj => obj && typeof obj === 'object';
return objects.reduce((prev, obj) => {
Object.keys(obj).forEach(key => {
const pVal = prev[key];
const oVal = obj[key];
if (Array.isArray(pVal) && Array.isArray(oVal)) {
prev[key] = pVal.concat(...oVal);
}
else if (isObject(pVal) && isObject(oVal)) {
prev[key] = mergeDeep(pVal, oVal);
}
else {
prev[key] = oVal;
}
});
return prev;
}, {});
}
// Test objects
const obj1 = {
a: 1,
b: 1,
c: { x: 1, y: 1 },
d: [ 1, 1 ]
}
const obj2 = {
b: 2,
c: { y: 2, z: 2 },
d: [ 2, 2 ],
e: 2
}
const obj3 = mergeDeep(obj1, obj2);
// Out
console.log(obj3);
I know there's a lot of answers already and as many comments arguing they won't work. The only consensus is that it's so complicated that nobody made a standard for it. However, most of accepted answers in SO expose "simple tricks" that are widely used. So, for all of us like me who are no experts but want to write safer code by grasping a little more about javascript's complexity, I'll try to shed some light.
Before getting our hands dirty, let me clarify 2 points:
[DISCLAIMER] I propose a function below that tackles how we deep loop into javascript objects for copy and illustrates what is generally too shortly commented. It is not production-ready. For sake of clarity, I have purposedly left aside other considerations like circular objects (track by a set or unconflicting symbol property), copying reference value or deep clone, immutable destination object (deep clone again?), case-by-case study of each type of objects, get/set properties via accessors... Also, I did not test performance -although it's important- because it's not the point here either.
I'll use copy or assign terms instead of merge. Because in my mind a merge is conservative and should fail upon conflicts. Here, when conflicting, we want the source to overwrite the destination. Like Object.assign does.
Answers with for..in or Object.keys are misleading
Making a deep copy seems so basic and common practice that we expect to find a one-liner or, at least, a quick win via simple recursion. We don't expect we should need a library or write a custom function of 100 lines.
When I first read Salakar's answer, I genuinely thought I could do better and simpler (you can compare it with Object.assign on x={a:1}, y={a:{b:1}}). Then I read the8472's answer and I thought... there is no getting away so easily, improving already given answers won't get us far.
Let's let deep copy and recursive aside an instant. Just consider how (wrongly) people parse properties to copy a very simple object.
const y = Object.create(
{ proto : 1 },
{ a: { enumerable: true, value: 1},
[Symbol('b')] : { enumerable: true, value: 1} } )
Object.assign({},y)
> { 'a': 1, Symbol(b): 1 } // All (enumerable) properties are copied
((x,y) => Object.keys(y).reduce((acc,k) => Object.assign(acc, { [k]: y[k] }), x))({},y)
> { 'a': 1 } // Missing a property!
((x,y) => {for (let k in y) x[k]=y[k];return x})({},y)
> { 'a': 1, 'proto': 1 } // Missing a property! Prototype's property is copied too!
Object.keys will omit own non-enumerable properties, own symbol-keyed properties and all prototype's properties. It may be fine if your objects don't have any of those. But keep it mind that Object.assign handles own symbol-keyed enumerable properties. So your custom copy lost its bloom.
for..in will provide properties of the source, of its prototype and of the full prototype chain without you wanting it (or knowing it). Your target may end up with too many properties, mixing up prototype properties and own properties.
If you're writing a general purpose function and you're not using Object.getOwnPropertyDescriptors, Object.getOwnPropertyNames, Object.getOwnPropertySymbols or Object.getPrototypeOf, you're most probably doing it wrong.
Things to consider before writing your function
First, make sure you understand what a Javascript object is. In Javascript, an object is made of its own properties and a (parent) prototype object. The prototype object in turn is made of its own properties and a prototype object. And so on, defining a prototype chain.
A property is a pair of key (string or symbol) and descriptor (value or get/set accessor, and attributes like enumerable).
Finally, there are many types of objects. You may want to handle differently an object Object from an object Date or an object Function.
So, writing your deep copy, you should answer at least those questions:
What do I consider deep (proper for recursive look up) or flat?
What properties do I want to copy? (enumerable/non-enumerable, string-keyed/symbol-keyed, own properties/prototype's own properties, values/descriptors...)
For my example, I consider that only the object Objects are deep, because other objects created by other constructors may not be proper for an in-depth look. Customized from this SO.
function toType(a) {
// Get fine type (object, array, function, null, error, date ...)
return ({}).toString.call(a).match(/([a-z]+)(:?\])/i)[1];
}
function isDeepObject(obj) {
return "Object" === toType(obj);
}
And I made an options object to choose what to copy (for demo purpose).
const options = {nonEnum:true, symbols:true, descriptors: true, proto:true};
Proposed function
You can test it in this plunker.
function deepAssign(options) {
return function deepAssignWithOptions (target, ...sources) {
sources.forEach( (source) => {
if (!isDeepObject(source) || !isDeepObject(target))
return;
// Copy source's own properties into target's own properties
function copyProperty(property) {
const descriptor = Object.getOwnPropertyDescriptor(source, property);
//default: omit non-enumerable properties
if (descriptor.enumerable || options.nonEnum) {
// Copy in-depth first
if (isDeepObject(source[property]) && isDeepObject(target[property]))
descriptor.value = deepAssign(options)(target[property], source[property]);
//default: omit descriptors
if (options.descriptors)
Object.defineProperty(target, property, descriptor); // shallow copy descriptor
else
target[property] = descriptor.value; // shallow copy value only
}
}
// Copy string-keyed properties
Object.getOwnPropertyNames(source).forEach(copyProperty);
//default: omit symbol-keyed properties
if (options.symbols)
Object.getOwnPropertySymbols(source).forEach(copyProperty);
//default: omit prototype's own properties
if (options.proto)
// Copy souce prototype's own properties into target prototype's own properties
deepAssign(Object.assign({},options,{proto:false})) (// Prevent deeper copy of the prototype chain
Object.getPrototypeOf(target),
Object.getPrototypeOf(source)
);
});
return target;
}
}
That can be used like this:
const x = { a: { a: 1 } },
y = { a: { b: 1 } };
deepAssign(options)(x,y); // { a: { a: 1, b: 1 } }
If you want to have a one liner without requiring a huge library like lodash, I suggest you to use deepmerge (npm install deepmerge) or deepmerge-ts (npm install deepmerge-ts).
deepmerge also comes with typings for TypeScript and is more stable (since it's older), but deepmerge-ts is also available for Deno and is faster by design, although written in TypeScript as the name implies.
Once imported you can do
deepmerge({ a: 1, b: 2, c: 3 }, { a: 2, d: 3 });
to get
{ a: 2, b: 2, c: 3, d: 3 }
This works nicely with complex objects and arrays. A real all-rounder solution this is.
I use lodash:
import _ = require('lodash');
value = _.merge(value1, value2);
Many answers use tens of lines of code, or require adding a new library to the project, but if you use recursion, this is just 4 lines of code.
function merge(current, updates) {
for (key of Object.keys(updates)) {
if (!current.hasOwnProperty(key) || typeof updates[key] !== 'object') current[key] = updates[key];
else merge(current[key], updates[key]);
}
return current;
}
console.log(merge({ a: { a: 1 } }, { a: { b: 1 } }));
Arrays handling: The above version overwrites old array values with new ones. If you want it to keep the old array values and add the new ones, just add a else if (current[key] instanceof Array && updates[key] instanceof Array) current[key] = current[key].concat(updates[key]) block above the else statament and you're all set.
Here, straight forward;
a simple solution that works like Object.assign just deep, and works for an array, without any modification.
function deepAssign(target, ...sources) {
for (source of sources) {
for (let k in source) {
let vs = source[k], vt = target[k]
if (Object(vs) == vs && Object(vt) === vt) {
target[k] = deepAssign(vt, vs)
continue
}
target[k] = source[k]
}
}
return target
}
x = { a: { a: 1 }, b: [1,2] }
y = { a: { b: 1 }, b: [3] }
z = { c: 3, b: [,,,4] }
x = deepAssign(x, y, z)
console.log(JSON.stringify(x) === JSON.stringify({
"a": {
"a": 1,
"b": 1
},
"b": [ 1, 2, null, 4 ],
"c": 3
}))
Edit:
I answer somewhere else about a new method for deep comparing 2 objects.
that method can use also for a deep merging. If you want implantation put a comment
https://stackoverflow.com/a/71177790/1919821
Here is TypeScript implementation:
export const mergeObjects = <T extends object = object>(target: T, ...sources: T[]): T => {
if (!sources.length) {
return target;
}
const source = sources.shift();
if (source === undefined) {
return target;
}
if (isMergebleObject(target) && isMergebleObject(source)) {
Object.keys(source).forEach(function(key: string) {
if (isMergebleObject(source[key])) {
if (!target[key]) {
target[key] = {};
}
mergeObjects(target[key], source[key]);
} else {
target[key] = source[key];
}
});
}
return mergeObjects(target, ...sources);
};
const isObject = (item: any): boolean => {
return item !== null && typeof item === 'object';
};
const isMergebleObject = (item): boolean => {
return isObject(item) && !Array.isArray(item);
};
And Unit Tests:
describe('merge', () => {
it('should merge Objects and all nested Ones', () => {
const obj1 = { a: { a1: 'A1'}, c: 'C', d: {} };
const obj2 = { a: { a2: 'A2'}, b: { b1: 'B1'}, d: null };
const obj3 = { a: { a1: 'A1', a2: 'A2'}, b: { b1: 'B1'}, c: 'C', d: null};
expect(mergeObjects({}, obj1, obj2)).toEqual(obj3);
});
it('should behave like Object.assign on the top level', () => {
const obj1 = { a: { a1: 'A1'}, c: 'C'};
const obj2 = { a: undefined, b: { b1: 'B1'}};
expect(mergeObjects({}, obj1, obj2)).toEqual(Object.assign({}, obj1, obj2));
});
it('should not merge array values, just override', () => {
const obj1 = {a: ['A', 'B']};
const obj2 = {a: ['C'], b: ['D']};
expect(mergeObjects({}, obj1, obj2)).toEqual({a: ['C'], b: ['D']});
});
it('typed merge', () => {
expect(mergeObjects<TestPosition>(new TestPosition(0, 0), new TestPosition(1, 1)))
.toEqual(new TestPosition(1, 1));
});
});
class TestPosition {
constructor(public x: number = 0, public y: number = 0) {/*empty*/}
}
The deepmerge npm package appears to be the most widely used library for solving this problem:
https://www.npmjs.com/package/deepmerge
I would like to present a pretty simple ES5 alternative. The function gets 2 parameters - target and source that must be of type "object". Target will be the resulting object. Target keeps all its original properties but their values may be modified though.
function deepMerge(target, source) {
if(typeof target !== 'object' || typeof source !== 'object') return false; // target or source or both ain't objects, merging doesn't make sense
for(var prop in source) {
if(!source.hasOwnProperty(prop)) continue; // take into consideration only object's own properties.
if(prop in target) { // handling merging of two properties with equal names
if(typeof target[prop] !== 'object') {
target[prop] = source[prop];
} else {
if(typeof source[prop] !== 'object') {
target[prop] = source[prop];
} else {
if(target[prop].concat && source[prop].concat) { // two arrays get concatenated
target[prop] = target[prop].concat(source[prop]);
} else { // two objects get merged recursively
target[prop] = deepMerge(target[prop], source[prop]);
}
}
}
} else { // new properties get added to target
target[prop] = source[prop];
}
}
return target;
}
cases:
if target doesn't have a source property, target gets it;
if target does have a source property and target & source are not
both objects (3 cases out of 4), target's property gets overridden;
if target does have a source property and both of them are objects/arrays (1 remaining case), then recursion happens merging two objects (or concatenation of two arrays);
also consider the following:
array + obj = array
obj + array = obj
obj + obj = obj (recursively merged)
array + array = array (concat)
It is predictable and supports primitive types as well as arrays and objects. Also, since we can merge 2 objects, we can merge more than 2 by folding with reduce, as sketched below.
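For instance, a minimal sketch of that idea, assuming the deepMerge function above and three plain objects a, b and c:
var merged = [a, b, c].reduce(deepMerge);
// equivalent to deepMerge(deepMerge(a, b), c); note that a is mutated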
take a look at an example (and play around with it if you want):
var a = {
"a_prop": 1,
"arr_prop": [4, 5, 6],
"obj": {
"a_prop": {
"t_prop": 'test'
},
"b_prop": 2
}
};
var b = {
"a_prop": 5,
"arr_prop": [7, 8, 9],
"b_prop": 15,
"obj": {
"a_prop": {
"u_prop": false
},
"b_prop": {
"s_prop": null
}
}
};
function deepMerge(target, source) {
if(typeof target !== 'object' || typeof source !== 'object') return false;
for(var prop in source) {
if(!source.hasOwnProperty(prop)) continue;
if(prop in target) {
if(typeof target[prop] !== 'object') {
target[prop] = source[prop];
} else {
if(typeof source[prop] !== 'object') {
target[prop] = source[prop];
} else {
if(target[prop].concat && source[prop].concat) {
target[prop] = target[prop].concat(source[prop]);
} else {
target[prop] = deepMerge(target[prop], source[prop]);
}
}
}
} else {
target[prop] = source[prop];
}
}
return target;
}
console.log(deepMerge(a, b));
There is a limitation - browser's call stack length. Modern browsers will throw an error at some really deep level of recursion (think of thousands of nested calls). Also you are free to treat situations like array + object etc. as you wish by adding new conditions and type checks.
A simple solution with ES5 (it overwrites existing values):
function merge(current, update) {
Object.keys(update).forEach(function(key) {
// if current[key] exists and is an object (but not an array),
// we go one level deeper
if (current.hasOwnProperty(key)
&& typeof current[key] === 'object'
&& !(current[key] instanceof Array)) {
merge(current[key], update[key]);
// if key doesn't exist in current, or current[key] is not an
// object (or is an array), assign/overwrite current[key] with update[key]
} else {
current[key] = update[key];
}
});
return current;
}
var x = { a: { a: 1 } }
var y = { a: { b: 1 } }
console.log(merge(x, y));
If npm libraries can be used as a solution, object-merge-advanced from yours truly allows you to merge objects deeply and to customise/override every single merge action using a familiar callback function. The main idea of it is more than just deep merging: what happens with the value when two keys are the same? This library takes care of that: when two keys clash, object-merge-advanced weighs the types, aiming to retain as much data as possible after merging.
The first input argument's key is marked #1, the second argument's #2. Depending on each type, one is chosen for the result key's value. In the diagram (not reproduced here), "an object" means a plain object (not an array, etc.).
When keys don't clash, they all enter the result.
If you used object-merge-advanced to merge the snippet from your example:
const mergeObj = require("object-merge-advanced");
const x = { a: { a: 1 } };
const y = { a: { b: 1 } };
const res = mergeObj(x, y);
console.log(res);
// => res = {
// a: {
// a: 1,
// b: 1
// }
// }
Its algorithm recursively traverses all input object keys, compares them, and builds and returns the new merged result.
The following function deep-merges source into target; it covers primitives and arrays as well as objects.
function mergeDeep (target, source) {
if (typeof target == "object" && typeof source == "object") {
for (const key in source) {
if (source[key] === null && (target[key] === undefined || target[key] === null)) {
target[key] = null;
} else if (source[key] instanceof Array) {
if (!target[key]) target[key] = [];
//concatenate arrays
target[key] = target[key].concat(source[key]);
} else if (typeof source[key] == "object") {
if (!target[key]) target[key] = {};
mergeDeep(target[key], source[key]);
} else {
target[key] = source[key];
}
}
}
return target;
}
Most examples here seem too complex. I'm using one I created in TypeScript, and I think it should cover most cases (I handle arrays as regular data, just replacing them).
const isObject = (item: any) => typeof item === 'object' && !Array.isArray(item);
export const merge = <A = Object, B = Object>(target: A, source: B): A & B => {
const isDeep = (prop: string) =>
isObject(source[prop]) && target.hasOwnProperty(prop) && isObject(target[prop]);
const replaced = Object.getOwnPropertyNames(source)
.map(prop => ({ [prop]: isDeep(prop) ? merge(target[prop], source[prop]) : source[prop] }))
.reduce((a, b) => ({ ...a, ...b }), {});
return {
...(target as Object),
...(replaced as Object)
} as A & B;
};
Same thing in plain JS, just in case:
const isObject = item => typeof item === 'object' && !Array.isArray(item);
const merge = (target, source) => {
const isDeep = prop =>
isObject(source[prop]) && target.hasOwnProperty(prop) && isObject(target[prop]);
const replaced = Object.getOwnPropertyNames(source)
.map(prop => ({ [prop]: isDeep(prop) ? merge(target[prop], source[prop]) : source[prop] }))
.reduce((a, b) => ({ ...a, ...b }), {});
return {
...target,
...replaced
};
};
Here are my test cases to show how you could use it
describe('merge', () => {
context('shallow merges', () => {
it('merges objects', () => {
const a = { a: 'discard' };
const b = { a: 'test' };
expect(merge(a, b)).to.deep.equal({ a: 'test' });
});
it('extends objects', () => {
const a = { a: 'test' };
const b = { b: 'test' };
expect(merge(a, b)).to.deep.equal({ a: 'test', b: 'test' });
});
it('extends a property with an object', () => {
const a = { a: 'test' };
const b = { b: { c: 'test' } };
expect(merge(a, b)).to.deep.equal({ a: 'test', b: { c: 'test' } });
});
it('replaces a property with an object', () => {
const a = { b: 'whatever', a: 'test' };
const b = { b: { c: 'test' } };
expect(merge(a, b)).to.deep.equal({ a: 'test', b: { c: 'test' } });
});
});
context('deep merges', () => {
it('merges objects', () => {
const a = { test: { a: 'discard', b: 'test' } };
const b = { test: { a: 'test' } } ;
expect(merge(a, b)).to.deep.equal({ test: { a: 'test', b: 'test' } });
});
it('extends objects', () => {
const a = { test: { a: 'test' } };
const b = { test: { b: 'test' } };
expect(merge(a, b)).to.deep.equal({ test: { a: 'test', b: 'test' } });
});
it('extends a property with an object', () => {
const a = { test: { a: 'test' } };
const b = { test: { b: { c: 'test' } } };
expect(merge(a, b)).to.deep.equal({ test: { a: 'test', b: { c: 'test' } } });
});
it('replaces a property with an object', () => {
const a = { test: { b: 'whatever', a: 'test' } };
const b = { test: { b: { c: 'test' } } };
expect(merge(a, b)).to.deep.equal({ test: { a: 'test', b: { c: 'test' } } });
});
});
});
Please let me know if you think I'm missing some functionality.
We can use jQuery's $.extend(true, object1, object2) for deep merging. The value true tells it to merge the two objects recursively, modifying the first.
$.extend(true, target, object)
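For the objects from the original question (assuming jQuery is loaded), that looks like this; passing {} as the target leaves x and y untouched:
var x = { a: { a: 1 } };
var y = { a: { b: 1 } };
var z = $.extend(true, {}, x, y);
console.log(z); // { a: { a: 1, b: 1 } }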
If you are using ImmutableJS you can use mergeDeep:
fromJS(options).mergeDeep(options2).toJS();
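A small self-contained sketch with the x and y objects from the question (assuming Immutable.js is installed):
const { fromJS } = require('immutable');

const x = { a: { a: 1 } };
const y = { a: { b: 1 } };

console.log(fromJS(x).mergeDeep(fromJS(y)).toJS()); // { a: { a: 1, b: 1 } }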
Ramda, which is a nice library of JavaScript functions, has mergeDeepLeft and mergeDeepRight. Either of these works pretty well for this problem. Please take a look at the documentation here: https://ramdajs.com/docs/#mergeDeepLeft
For the specific example in question we can use:
import { mergeDeepLeft } from 'ramda'
const x = { a: { a: 1 } }
const y = { a: { b: 1 } }
const z = mergeDeepLeft(x, y) // {"a":{"a":1,"b":1}}
with reduce
export const merge = (objFrom, objTo) => Object.keys(objFrom)
.reduce(
(merged, key) => {
merged[key] = objFrom[key] instanceof Object && !Array.isArray(objFrom[key])
? merge(objFrom[key], merged[key] ?? {})
: objFrom[key]
return merged
}, { ...objTo }
)
test('merge', async () => {
const obj1 = { par1: -1, par2: { par2_1: -21, par2_5: -25 }, arr: [0,1,2] }
const obj2 = { par1: 1, par2: { par2_1: 21 }, par3: 3, arr: [3,4,5] }
const obj3 = merge(obj1, obj2)
expect(obj3).toEqual(
{ par1: -1, par2: { par2_1: -21, par2_5: -25 }, par3: 3, arr: [0,1,2] }
)
})
I was having this issue when loading a cached redux state. If I just loaded the cached state, I'd run into errors whenever a new app version changed the state structure.
It was already mentioned, that lodash offers the merge function, which I used:
const currentInitialState = configureState().getState();
const mergedState = _.merge({}, currentInitialState, cachedState);
const store = configureState(mergedState);
I didn't like any of the existing solutions. So, I went ahead and wrote my own.
Object.prototype.merge = function(object) {
for (const key in object) {
if (object.hasOwnProperty(key)) {
if (typeof this[key] === "object" && typeof object[key] === "object") {
this[key].merge(object[key]);
continue;
}
this[key] = object[key];
}
}
return this;
}
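For example, with this prototype extension in place, a call could look like this (hypothetical sample data):
const merged = { a: { a: 1 } }.merge({ a: { b: 1 } });
console.log(merged); // { a: { a: 1, b: 1 } }
Note that assigning to Object.prototype like this creates an enumerable property, so "merge" will show up in unguarded for...in loops over any object; defining it with Object.defineProperty would avoid that.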
I hope this helps those of you who struggle to understand what's going on. I've seen a lot of meaningless variables being used on here.
Thanks
Here's another one I just wrote that supports arrays. It concats them.
function isObject(obj) {
return obj !== null && typeof obj === 'object';
}
function isPlainObject(obj) {
return isObject(obj) && (
obj.constructor === Object // obj = {}
|| obj.constructor === undefined // obj = Object.create(null)
);
}
function mergeDeep(target, ...sources) {
if (!sources.length) return target;
const source = sources.shift();
if(Array.isArray(target)) {
if(Array.isArray(source)) {
target.push(...source);
} else {
target.push(source);
}
} else if(isPlainObject(target)) {
if(isPlainObject(source)) {
for(let key of Object.keys(source)) {
if(!target[key]) {
target[key] = source[key];
} else {
mergeDeep(target[key], source[key]);
}
}
} else {
throw new Error(`Cannot merge object with non-object`);
}
} else {
target = source;
}
return mergeDeep(target, ...sources);
};
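A quick usage sketch (hypothetical data) showing both the object merging and the array concatenation:
console.log(mergeDeep({ a: { x: 1 }, list: [1, 2] }, { a: { y: 2 }, list: [3] }));
// => { a: { x: 1, y: 2 }, list: [1, 2, 3] }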
Use this function:
function merge(target, source, mutable = false) {
const newObj = typeof target == 'object' ? (mutable ? target : Object.assign({}, target)) : {};
for (const prop in source) {
if (target[prop] == null || typeof target[prop] === 'undefined') {
newObj[prop] = source[prop];
} else if (Array.isArray(target[prop])) {
newObj[prop] = source[prop] || target[prop];
} else if (target[prop] instanceof RegExp) {
newObj[prop] = source[prop] || target[prop];
} else {
newObj[prop] = typeof source[prop] === 'object' ? merge(target[prop], source[prop]) : source[prop];
}
}
return newObj;
}
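A possible usage sketch (hypothetical data), assuming the function above is in scope:
const x = { a: { a: 1 } };
const y = { a: { b: 1 } };
console.log(merge(x, y));       // { a: { a: 1, b: 1 } }, x is left untouched
console.log(merge(x, y, true)); // same result, but written into x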
This is a cheap deep merge that uses as little code as I could think of. Each source overwrites the previous property when it exists.
const { keys } = Object;
const isObject = a => typeof a === "object" && !Array.isArray(a);
const merge = (a, b) =>
isObject(a) && isObject(b)
? deepMerge(a, b)
: isObject(a) && !isObject(b)
? a
: b;
const coalesceByKey = source => (acc, key) =>
(acc[key] && source[key]
? (acc[key] = merge(acc[key], source[key]))
: (acc[key] = source[key])) && acc;
/**
* Merge all sources into the target
 * overwriting primitive values in the accumulated target as we go (if they already exist)
 * @param {*} target
 * @param {...any} sources
*/
const deepMerge = (target, ...sources) =>
sources.reduce(
(acc, source) => keys(source).reduce(coalesceByKey(source), acc),
target
);
console.log(deepMerge({ a: 1 }, { a: 2 }));
console.log(deepMerge({ a: 1 }, { a: { b: 2 } }));
console.log(deepMerge({ a: { b: 2 } }, { a: 1 }));
// copies all properties from source object to dest object recursively
export function recursivelyMoveProperties(source, dest) {
for (const prop in source) {
if (!source.hasOwnProperty(prop)) {
continue;
}
if (source[prop] === null) {
// property is null
dest[prop] = source[prop];
continue;
}
if (typeof source[prop] === 'object') {
// if property is object let's dive into in
if (Array.isArray(source[prop])) {
dest[prop] = [];
} else {
if (!dest.hasOwnProperty(prop)
|| typeof dest[prop] !== 'object'
|| dest[prop] === null || Array.isArray(dest[prop])
|| !Object.keys(dest[prop]).length) {
dest[prop] = {};
}
}
recursivelyMoveProperties(source[prop], dest[prop]);
continue;
}
// property is a simple type: string, number, etc.
dest[prop] = source[prop];
}
return dest;
}
Unit test:
describe('recursivelyMoveProperties', () => {
it('should copy properties correctly', () => {
const source: any = {
propS1: 'str1',
propS2: 'str2',
propN1: 1,
propN2: 2,
propA1: [1, 2, 3],
propA2: [],
propB1: true,
propB2: false,
propU1: null,
propU2: null,
propD1: undefined,
propD2: undefined,
propO1: {
subS1: 'sub11',
subS2: 'sub12',
subN1: 11,
subN2: 12,
subA1: [11, 12, 13],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
propO2: {
subS1: 'sub21',
subS2: 'sub22',
subN1: 21,
subN2: 22,
subA1: [21, 22, 23],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
};
let dest: any = {
propS2: 'str2',
propS3: 'str3',
propN2: -2,
propN3: 3,
propA2: [2, 2],
propA3: [3, 2, 1],
propB2: true,
propB3: false,
propU2: 'not null',
propU3: null,
propD2: 'defined',
propD3: undefined,
propO2: {
subS2: 'inv22',
subS3: 'sub23',
subN2: -22,
subN3: 23,
subA2: [5, 5, 5],
subA3: [31, 32, 33],
subB2: false,
subB3: true,
subU2: 'not null --- ',
subU3: null,
subD2: ' not undefined ----',
subD3: undefined,
},
propO3: {
subS1: 'sub31',
subS2: 'sub32',
subN1: 31,
subN2: 32,
subA1: [31, 32, 33],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
};
dest = recursivelyMoveProperties(source, dest);
expect(dest).toEqual({
propS1: 'str1',
propS2: 'str2',
propS3: 'str3',
propN1: 1,
propN2: 2,
propN3: 3,
propA1: [1, 2, 3],
propA2: [],
propA3: [3, 2, 1],
propB1: true,
propB2: false,
propB3: false,
propU1: null,
propU2: null,
propU3: null,
propD1: undefined,
propD2: undefined,
propD3: undefined,
propO1: {
subS1: 'sub11',
subS2: 'sub12',
subN1: 11,
subN2: 12,
subA1: [11, 12, 13],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
propO2: {
subS1: 'sub21',
subS2: 'sub22',
subS3: 'sub23',
subN1: 21,
subN2: 22,
subN3: 23,
subA1: [21, 22, 23],
subA2: [],
subA3: [31, 32, 33],
subB1: false,
subB2: true,
subB3: true,
subU1: null,
subU2: null,
subU3: null,
subD1: undefined,
subD2: undefined,
subD3: undefined,
},
propO3: {
subS1: 'sub31',
subS2: 'sub32',
subN1: 31,
subN2: 32,
subA1: [31, 32, 33],
subA2: [],
subB1: false,
subB2: true,
subU1: null,
subU2: null,
subD1: undefined,
subD2: undefined,
},
});
});
});
Another variation using recursion, hope you find it useful.
const merge = (obj1, obj2) => {
const recursiveMerge = (obj, entries) => {
for (const [key, value] of entries) {
if (typeof value === "object") {
obj[key] = obj[key] ? {...obj[key]} : {};
recursiveMerge(obj[key], Object.entries(value))
} else {
obj[key] = value;
}
}
return obj;
}
return recursiveMerge(obj1, Object.entries(obj2))
}
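A quick usage sketch, assuming the function above (hypothetical data; note that obj1 is mutated):
console.log(merge({ a: { a: 1 } }, { a: { b: 1 }, c: 2 }));
// => { a: { a: 1, b: 1 }, c: 2 }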
My use case for this was to merge default values into a configuration. If my component accepts a configuration object that has a deeply nested structure, and my component defines a default configuration, I wanted to set default values in my configuration for all configuration options that were not supplied.
Example usage:
export default MyComponent = ({config}) => {
const mergedConfig = mergeDefaults(config, {header:{margins:{left:10, top: 10}}});
// Component code here
}
This allows me to pass an empty or null config, or a partial config and have all of the values that are not configured fall back to their default values.
My implementation of mergeDefaults looks like this:
export default function mergeDefaults(config, defaults) {
if (config === null || config === undefined) return defaults;
for (var attrname in defaults) {
if (defaults[attrname].constructor === Object) config[attrname] = mergeDefaults(config[attrname], defaults[attrname]);
else if (config[attrname] === undefined) config[attrname] = defaults[attrname];
}
return config;
}
And these are my unit tests
import '#testing-library/jest-dom/extend-expect';
import mergeDefaults from './mergeDefaults';
describe('mergeDefaults', () => {
it('should create configuration', () => {
const config = mergeDefaults(null, { a: 10, b: { c: 'default1', d: 'default2' } });
expect(config.a).toStrictEqual(10);
expect(config.b.c).toStrictEqual('default1');
expect(config.b.d).toStrictEqual('default2');
});
it('should fill configuration', () => {
const config = mergeDefaults({}, { a: 10, b: { c: 'default1', d: 'default2' } });
expect(config.a).toStrictEqual(10);
expect(config.b.c).toStrictEqual('default1');
expect(config.b.d).toStrictEqual('default2');
});
it('should not overwrite configuration', () => {
const config = mergeDefaults({ a: 12, b: { c: 'config1', d: 'config2' } }, { a: 10, b: { c: 'default1', d: 'default2' } });
expect(config.a).toStrictEqual(12);
expect(config.b.c).toStrictEqual('config1');
expect(config.b.d).toStrictEqual('config2');
});
it('should merge configuration', () => {
const config = mergeDefaults({ a: 12, b: { d: 'config2' } }, { a: 10, b: { c: 'default1', d: 'default2' }, e: 15 });
expect(config.a).toStrictEqual(12);
expect(config.b.c).toStrictEqual('default1');
expect(config.b.d).toStrictEqual('config2');
expect(config.e).toStrictEqual(15);
});
});
Sometimes you don't need a deep merge, even if you think you do. For example, if you have a default config with nested objects and you want to extend it deeply with your own config, you can create a class for that. The concept is very simple:
function AjaxConfig(config) {
// Default values + config
Object.assign(this, {
method: 'POST',
contentType: 'text/plain'
}, config);
// Default values in nested objects
this.headers = Object.assign({}, this.headers, {
'X-Requested-With': 'custom'
});
}
// Define your config
var config = {
url: 'https://google.com',
headers: {
'x-client-data': 'CI22yQEI'
}
};
// Extend the default values with your own
var fullMergedConfig = new AjaxConfig(config);
// View in DevTools
console.log(fullMergedConfig);
You can convert it to a function (not a constructor).
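For instance, a plain-function version of the same idea could look like this (makeAjaxConfig is just a hypothetical name; the defaults mirror the constructor above):
function makeAjaxConfig(config) {
  // Default values + config
  var merged = Object.assign({
    method: 'POST',
    contentType: 'text/plain'
  }, config);
  // Default values in nested objects
  merged.headers = Object.assign({}, merged.headers, {
    'X-Requested-With': 'custom'
  });
  return merged;
}

var fullMergedConfig = makeAjaxConfig(config);
console.log(fullMergedConfig);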

Flatten nested object/array in javascript

I'm new to Javascript and I have nested objects and arrays that I would like to flatten.
I have...
[{ a: 2, b: [{ c: 3, d: [{e: 4, f: 5}, {e: 5,f: 6}]},
{ c: 4, d: [{e: 7, f: 8}]}
]
}]
and would like...
[{a:2,c:3,e:4,f:5}, {a:2,c:3,e:5,f:6}, {a:2,c:4,e:7,f:8}]
I've tried to adapt the following function written for an object for my array but i only get the final object within the array [{a:2,c:4,e:7,f:8}] https://stackoverflow.com/a/33158929/14313188. I think my issue is knowing how to iterate through arrays and objects?
original script:
function flatten(obj) {
var flattenedObj = {};
Object.keys(obj).forEach(function(key){
if (typeof obj[key] === 'object') {
$.extend(flattenedObj, flatten(obj[key]));
} else {
flattenedObj[key] = obj[key];
}
});
return flattenedObj;
}
my scripts (same result for both):
flat_array=[];
function superflat(array){
for (var i = 0; i < array.length; i++) {
var obj = array[i]
var flattenedObj = {};
Object.keys(obj).forEach(function(key){
if (typeof obj[key] === 'object') {
$.extend(flattenedObj, flatten(obj[key]));
} else {
flattenedObj[key] = obj[key];
}
});
flat_array.push(flattenedObj);
}
};
mega_flat_array=[];
function megaflatten(obj) {
Object.keys(obj).forEach(function(key){
var flattenedObj = {};
if (typeof obj[key] === 'object') {
$.extend(flattenedObj, flatten(obj[key]));
} else {
flattenedObj[key] = obj[key];
}
mega_flat_array.push(flattenedObj);
});
}
Thanks for your help
I would suggest starting with simpler data objects to test your function with, then progressively add more complex objects until your function performs as expected.
Here you can see how I started with a simple test1 object, then test2 and so on so that the logic gets broken down into smaller increments.
To remove the duplicates we previously had, I had to throw and catch an error to break out of the recursive forEach loops, which were otherwise adding unnecessary duplicate "rows". Perhaps it would be better to use a normal for loop, out of which you can simply break, and reserve error handling for real errors.
The basic idea of the recursive function is to check the type of the value (array or object) and then loop through it to add values, but another check is needed on the nested values to see whether they themselves are arrays or objects; if they are, call the function again. When a duplicate key is found, e.g. { c: 3 }, remove the current key and add the new one before continuing the loop.
You can add some more tests if you have some more sample data, but there are better libraries to help you with TDD (test-driven development).
const isArray = (arr) => {
return Array.isArray(arr);
};
const isObject = (obj) => {
return typeof obj === "object" && obj !== null;
};
const flatten = (tree, row, result) => {
try {
if (isArray(tree)) {
tree.forEach((branch, index) => {
flatten(branch, row, result);
});
} else if (isObject(tree)) {
Object.keys(tree).forEach((key) => {
//we don't want to add objects or arrays to the row -
if (!isArray(tree[key]) && !isObject(tree[key])) {
if (key in row) {
// new row detected, get existing keys to work with
let keysArray = Object.keys(row);
// we are going to loop backwards and delete duplicate keys
let end = Object.keys(row).length;
let stopAt = Object.keys(row).indexOf(key);
//delete object keys from back of object to the newly found one
for (let z = end; z > stopAt; z--) {
delete row[keysArray[z - 1]];
}
row[key] = tree[key];
} else {
row[key] = tree[key];
}
} else {
flatten(tree[key], row, result);
throw "skip";
}
});
//all other rows in results will be overridden if we don't stringify
result.push(JSON.stringify(row));
}
} catch (e) {
//console.log(e)
} finally {
return result.map((row) => JSON.parse(row));
}
};
///tests
const test1 = [
{
a: 2,
b: 3,
},
];
const expected1 = [{ a: 2, b: 3 }];
const test2 = [
{
a: 2,
b: [
{
c: 3,
},
],
},
];
const expected2 = [{ a: 2, c: 3 }];
const test3 = [
{
a: 2,
b: [
{
c: 3,
},
{ c: 4 },
{ c: 5 },
],
},
];
const expected3 = [
{ a: 2, c: 3 },
{ a: 2, c: 4 },
{ a: 2, c: 5 },
];
let test4 = [
{
a: 2,
b: [
{
c: 3,
d: [
{ e: 4, f: 5 },
{ e: 5, f: 6 },
],
},
{ c: 4, d: [{ e: 7, f: 8 }] },
],
},
];
const expected4 = [
{ a: 2, c: 3, e: 4, f: 5 },
{ a: 2, c: 3, e: 5, f: 6 },
{ a: 2, c: 4, e: 7, f: 8 },
];
const test = (name, res, expected) => {
console.log(
`${name} passed ${JSON.stringify(res) === JSON.stringify(expected)}`
);
//console.log(res, expected);
};
//test("test1", flatten(test1, {}, []), expected1);
//test("test2", flatten(test2, {}, []), expected2);
//test("test3", flatten(test3, {}, []), expected3);
test("test4", flatten(test4, {}, []), expected4);
It's a bit of a behemoth, and it doesn't preserve the keys' order, but it does work with no duplicates.
It is recursive, so watch out for the call stack.
First, loop through the items in the array:
- If an item is an array, make a recursive call.
  - On returning from that call, if the number of objects returned is more than there currently are in the final result, then update the returned objects with the properties from the objects in the final result, being careful to avoid overwriting pre-existing properties.
  - Otherwise, update the final results with the properties in the returned result, again being careful not to overwrite existing properties.
- If the item is not an array:
  - If this is the first item, put it into the final result.
  - Otherwise, add the item's properties to all the items in the final result, without overwriting any.
function makeFlat(arr) //assume you're always passing in an array
{
let objects = [];
arr.forEach(item =>
{
let currentObject = {};
const keys = Object.keys(item);
keys.forEach(key =>
{
const obj = item[key];
if(Array.isArray(obj))
{
let parts = makeFlat(obj);
if(objects.length > 0)
{
if(parts.length > objects.length)
{
parts.forEach(part =>
{
objects.forEach(ob =>
{
Object.keys(ob).forEach(k =>
{
if(Object.keys(part).indexOf(k) == -1)
{
part[k] = ob[k];
}
});
});
});
objects = parts;
}
else
{
objects.forEach(ob =>
{
parts.forEach(part =>
{
Object.keys(part).forEach(k =>
{
if(Object.keys(ob).indexOf(k) == -1)
{
ob[k] = part[k];
}
});
});
});
}
}
else
{
objects = parts;
}
}
else
{
if(Object.keys(currentObject).length == 0)
{
objects.push(currentObject);
}
currentObject[key] = item[key];
objects.forEach(ob =>
{
if(Object.keys(ob).indexOf(key) == -1)
{
ob[key] = currentObject[key]
}
});
}
});
});
return objects;
}
const inp = [{ a: 2, b: [{ c: 3, d: [{e: 4, f: 5}, {e: 5,f: 6}]},
{ c: 4, d: [{e: 7, f: 8}]}
], g:9
}];
let flattened = makeFlat(inp);
flattened.forEach(item => console.log(JSON.stringify(item)));

javascript compare all attributes of an object with other object

I have two objects A
var A = {
a: 1,
b: 2,
c: 3,
d: {
0: {
e: 4,
f: 5
},
1: {
g: 6
}
}
};
var B = {
a: 1,
b: 9,
c: 3,
d: {
0: {
e: 4,
f: 8
}
}
};
I want to compare all attributes in B with the corresponding attributes in A, if present, and return the ones whose values don't match.
so I want to return
b: 9,
d:{
0: {
f: 8
}
}
Is there a simple solution for this using lodash?
EDIT: I tried Object.keys for A and B, did an intersection, and then compared each key's value. It fails when I have nested attributes.
Here is a basic function that uses lodash to detect changes in simple nested objects and arrays. It is by no means comprehensive, but it works well for simple changesets.
function changes (a, b) {
if (_.isEqual(a, b)) {
return;
} else {
if (_.isArray(a) && _.isArray(b)) {
return _.reduce(b, function(array, value, index) {
value = changes(a[index], value);
if (!_.isUndefined(value)) {
array[index] = value;
}
return array;
}, []);
} else if (_.isObject(a) && _.isObject(b)) {
return _.reduce(b, function(object, value, key) {
value = changes(a[key], value);
if (!_.isUndefined(value)) {
object[key] = value;
}
return object;
}, {});
} else {
return b;
}
}
}
Here is an example of it in action: http://plnkr.co/edit/drMyGDAh2XP0925pBi8X?p=preview
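As a quick sanity check with the A and B objects from the question (assuming lodash is loaded as _ and the changes function above is in scope):
console.log(changes(A, B));
// => { b: 9, d: { 0: { f: 8 } } }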

looking for a function to perform a more sophisticated job than underscore.js's extend function [closed]

Closed. This question does not meet Stack Overflow guidelines. It is not currently accepting answers.
We don’t allow questions seeking recommendations for books, tools, software libraries, and more. You can edit the question so it can be answered with facts and citations.
Closed 8 years ago.
Improve this question
I wonder if there is some helper JavaScript library that has a function similar to underscore.js's _.extend.
What I am looking for is a function that given an associative array like the following:
{ foo: 1, bar: 2 }
and another "extending" associative array like the following:
{ foo : 3 }
can easily build the following "augmented" structure:
{ foo: [1, 3], bar: 2 }
otherwise I have to do it manually, but this task seems general enough to be a function in some helper library.
clarification with an example on what should happen with different objects:
base object: { foo: 1, bar: 2 }
extending object: { quuz: 3, bar: 4 }
result: { foo: 1, bar: [2, 4], quuz: 3 }
Actually, now it is clear to me that the operation is commutative (base and extending can be switched with the result being always the same)
additional example:
base object: { foo: 1, bar: [2,5] }
extending object: { foo: {a: 'A', b: 'B'} , bar: 4 }
result: { foo: [1, {a: 'A', b: 'B'}], bar: [2, 4, 5] }
Your requirement is not very clear: what do you want to happen with different objects? But this is a shallow extend that does what you seem to be asking; use it as a base for your experiments and tweak it to your design.
Javascript
function curstomExtend(target) {
var typeTarget = typeof target;
if (target === null || typeTarget !== 'object' && typeTarget !== 'function') {
throw new TypeError('target');
}
Array.prototype.slice.call(arguments, 1).forEach(function (source) {
var typeSource = typeof source;
if (source === null || typeSource !== 'object' && typeSource !== 'function') {
throw new TypeError('source');
}
Object.keys(source).forEach(function (key) {
var temp;
if (target.hasOwnProperty(key)) {
if (Array.isArray(target[key])) {
target[key].push(source[key]);
} else {
target[key] = [target[key]];
target[key].push(source[key]);
}
} else {
target[key] = source[key];
}
});
});
return target;
};
var a = {
foo: 1,
bar: 2,
fy: {
a: 1,
b: 2
}
},
b = {
foo: 3,
fy: {
s: 3,
t: 4
}
},
c = {
foo: [4, 5]
},
d = {
foo: {
x: 4,
y: 5
},
fy: {
s: 5,
t: 6
}
};
curstomExtend(a, b, c, d);
console.log(JSON.stringify(a));
Output
{"foo":[1,3,[4,5],{"x":4,"y":5}],"bar":2,"fy":[{"a":1,"b":2},{"s":3,"t":4},{"s":5,"t":6}]}
On jsFiddle
my solution
I wrote the following functions blend and blend_many that, combined,
are able to recursively "blend" the two objects.
/**
* applies blend to all the objects contained in the input list.
* The output is an object that contains a blend of all the input objects.
*/
function blend_many(list) {
var ret = _.reduce(list, blend, {});
for (var key in ret) {
var value = ret[key];
if (_.isArray(value)) {
var all_are_objects = _.reduce(value, function (a, b) {
return a && _.isObject(b);
}, true);
if (all_are_objects) {
// recurse!
value = blend_many(value);
}
}
ret[key] = value;
}
return ret;
}
function blend(base, extending) {
for (var key in extending) {
var value = extending[key];
if (_.isArray(value)) {
// recurse
value = blend_many(value);
}
if (base[key] === undefined) {
base[key] = value;
} else if (_.isArray(base[key])) {
base[key].push(value)
} else {
var prev = base[key];
base[key] = [prev, value];
}
}
return base;
}
for example, calling:
list_of_objs = [
{
a: {
foo: 1
}
},
{
a: {
foo: 2
}
}
];
blend_many(list_of_objs);
will return the following object:
{
a: {
foo: [ 1, 2 ]
}
}
(very useful for building mongodb queries!)
