map add/reduce two array object with same index - javascript

I have two array objects as follows:
var arr1 = [
{
name: 1,
value: 10
},
{
name: 2,
value: 15
}
]
var arr2 = [
{
name: 3,
value: 5
},
{
name: 4,
value: 3
}
]
I want to redefine the key and reduce each data with the same index.
output:
var arr1 = [
{
itemLabel: 1,
itemValue: 5
},
{
itemLabel: 2,
itemValue: 12
}
]
I'm doing now as following:
// NOTE(review): the original read `arr1.name` / `arr1.value` — `arr1` is the
// whole array, so both were undefined. The mapped element `row` is what
// carries name/value.
formatData = arr1.map((row, index) => ({
  itemLabel: row.name,
  itemValue: row.value - arr2[index].value
}))
Is there any better solution of doing this?

One-man army
A simple recursive program that handles everything in a single function. There's a clear mixture of concerns here which hurts the function's overall readability. We'll see one such remedy for this problem below
// Pairs elements of the two arrays index by index, emitting
// { itemLabel, itemValue } records until either array runs out.
const main = (xs, ys) => {
  const result = [];
  for (let i = 0; xs[i] !== undefined && ys[i] !== undefined; i++) {
    result.push({ itemLabel: xs[i].name, itemValue: xs[i].value - ys[i].value });
  }
  return result;
}
const arr1 =
[ { name: 1, value: 10 }, { name: 2, value: 15 } ]
const arr2 =
[ { name: 3, value: 5 }, { name: 4, value: 3 } ]
console.log (main (arr1, arr2))
// [ { itemLabel: 1, itemValue: 5 },
// { itemLabel: 2, itemValue: 12 } ]
Thinking with types
This part of the answer is influenced by type theory from the Monoid category – I won't go too far into it because I think the code should be able to demonstrate itself.
So we have two types in our problem: We'll call them Foo and Bar
Foo – has name, and value fields
Bar – has itemLabel and itemValue fields
We can represent our "types" however we want, but I chose a simple function which constructs an object
// Constructs a Foo record: { name, value }
const Foo = (name, value) => {
  return { name, value };
}
// Constructs a Bar record: { itemLabel, itemValue }
const Bar = (itemLabel, itemValue) => {
  return { itemLabel, itemValue };
}
Making values of a type
To construct new values of our type, we just apply our functions to the field values
const arr1 =
[ Foo (1, 10), Foo (2, 15) ]
const arr2 =
[ Foo (3, 5), Foo (4, 3) ]
Let's see the data we have so far
console.log (arr1)
// [ { name: 1, value: 10 },
// { name: 2, value: 15 } ]
console.log (arr2)
// [ { name: 3, value: 5 },
// { name: 4, value: 3 } ]
Some high-level planning
We're off to a great start. We have two arrays of Foo values. Our objective is to work through the two arrays by taking one Foo value from each array, combining them (more on this later), and then moving onto the next pair
// Index-aligned pairing of two sequences; stops as soon as either side
// has no element at the current position.
const zip = (xs, ys) => {
  const out = [];
  for (let i = 0; xs[i] !== undefined && ys[i] !== undefined; i++) {
    out.push([ xs[i], ys[i] ]);
  }
  return out;
}
console.log (zip (arr1, arr2))
// [ [ { name: 1, value: 10 },
// { name: 3, value: 5 } ],
// [ { name: 2, value: 15 },
// { name: 4, value: 3 } ] ]
Combining values: concat
With the Foo values properly grouped together, we can now focus more on what that combining process is. Here, I'm going to define a generic concat and then implement it on our Foo type
// generic concat
// Dispatches to the first operand's own `concat` — works for any
// Monoid-like value (Foo, Array, String, ...).
function concat (m1, m2) {
  return m1.concat(m2);
}
// A Foo value: carries name/value plus a Monoid-style `concat` that keeps
// this Foo's name and subtracts the other Foo's value.
const Foo = (name, value) =>
  ({ name
   , value
   , concat: (other) =>
       Foo (name, value - other.value)
   })
console.log (concat (Foo (1, 10), Foo (3, 5)))
// { name: 1, value: 5, concat: [Function] }
Does concat sound familiar? Array and String are also Monoid types!
concat ([ 1, 2 ], [ 3, 4 ])
// [ 1, 2, 3, 4 ]
concat ('foo', 'bar')
// 'foobar'
Higher-order functions
So now we have a way to combine two Foo values together. The name of the first Foo is kept, and the value properties are subtracted. Now we apply this to each pair in our "zipped" result. Functional programmers love higher-order functions, so you'll appreciate this higher-order harmony
// Spreads an argument array into f: apply(f)([a, b]) === f(a, b)
const apply = (f) => (args) =>
  f(...args)
zip (arr1, arr2) .map (apply (concat))
// [ { name: 1, value: 5, concat: [Function] },
// { name: 2, value: 12, concat: [Function] } ]
Transforming types
So now we have the Foo values with the correct name and value values, but we want our final answer to be Bar values. A specialized constructor is all we need
Bar.fromFoo = ({ name, value }) =>
Bar (name, value)
Bar.fromFoo (Foo (1,2))
// { itemLabel: 1, itemValue: 2 }
zip (arr1, arr2)
.map (apply (concat))
.map (Bar.fromFoo)
// [ { itemLabel: 1, itemValue: 5 },
// { itemLabel: 2, itemValue: 12 } ]
Hard work pays off
A beautiful, pure functional expression. Our program reads very nicely; flow and transformation of the data is easy to follow thanks to the declarative style.
// main :: ([Foo], [Foo]) -> [Bar]
const main = (xs, ys) =>
zip (xs, ys)
.map (apply (concat))
.map (Bar.fromFoo)
And a complete code demo, of course
// ---- complete demo ----
// Foo: name/value record with a Monoid-style concat that keeps this
// Foo's name and subtracts the other's value
const Foo = (name, value) =>
({ name
, value
, concat: ({name:_, value:value2}) =>
Foo (name, value - value2)
})
// Bar: the output record shape { itemLabel, itemValue }
const Bar = (itemLabel, itemValue) =>
({ itemLabel
, itemValue
})
// converts a Foo into a Bar
Bar.fromFoo = ({ name, value }) =>
Bar (name, value)
// generic combine: delegates to the first value's own concat
const concat = (m1, m2) =>
m1.concat (m2)
// apply(f)([a, b]) === f(a, b)
const apply = f => xs =>
f (...xs)
// index-aligned pairing; stops at the end of the shorter array
const zip = ([ x, ...xs ], [ y, ...ys ]) =>
x === undefined || y === undefined
? []
: [ [ x, y ] ] .concat (zip (xs, ys))
// main :: ([Foo], [Foo]) -> [Bar]
const main = (xs, ys) =>
zip (xs, ys)
.map (apply (concat))
.map (Bar.fromFoo)
const arr1 =
[ Foo (1, 10), Foo (2, 15) ]
const arr2 =
[ Foo (3, 5), Foo (4, 3) ]
console.log (main (arr1, arr2))
// [ { itemLabel: 1, itemValue: 5 },
// { itemLabel: 2, itemValue: 12 } ]
Remarks
Our program above is implemented with a .map-.map chain which means handling and creating intermediate values multiple times. We also created an intermediate array of [[x1,y1], [x2,y2], ...] in our call to zip. Category theory gives us things like equational reasoning so we could replace m.map(f).map(g) with m.map(compose(f,g)) and achieve the same result. So there's room to improve this yet, but I think this is just enough to cut your teeth and start thinking about things in a different way.

Your code is just fine, you could use recursion as well:
var arr1 =[{
name: 1,
value: 10
}, {
name: 2,
value: 15
}];
var arr2= [{
name: 3,
value: 5
}, {
name: 4,
value: 3
}]
/**
 * Recursively builds [{ itemLabel, itemValue }] from two equal-length
 * arrays, where itemValue = arr1[i].value - arr2[i].value.
 *
 * Fix: the base case must run BEFORE reading arr1[0] — the original
 * dereferenced arr1[0].name on empty arrays, so every call crashed with a
 * TypeError once the recursion consumed both arrays.
 *
 * @throws {Error} when the arrays differ in length
 */
const createObject = (arr1, arr2, ret = []) => {
  if (arr1.length !== arr2.length) {
    throw new Error("Arrays should be the same length.")
  }
  // base case: nothing left to pair up
  if (arr1.length === 0) {
    return ret;
  }
  const item = {
    itemLabel: arr1[0].name,
    itemValue: arr1[0].value - arr2[0].value
  };
  return createObject(arr1.slice(1), arr2.slice(1), ret.concat(item));
}
console.log(createObject(arr1,arr2));
Both functions implementing a map or reduce would have to use either arr1 or arr2 from outside of their own scope (not passed to them as a parameter), so strictly speaking they are not pure. But you could easily solve that with partial application:
var arr1 =[{
name: 1,
value: 10
}, {
name: 2,
value: 15
}];
var arr2= [{
name: 3,
value: 5
}, {
name: 4,
value: 3
}];
// Returns a map callback closed over `arr2`, so the callback itself stays
// pure with respect to outer mutable state.
const mapFunction = arr2 => (entry, i) => ({
  itemLabel: entry.name,
  itemValue: entry.value - arr2[i].value
})
// Validates the lengths match, then maps arr1 with the mapper partially
// applied to arr2 (inlined — no intermediate binding needed).
var createObject = (arr1, arr2, ret = []) => {
  if (arr1.length !== arr2.length) {
    throw ("Arrays should be the same length.")
  }
  return arr1.map(mapFunction(arr2));
}
console.log(createObject(arr1,arr2));
But as CodingIntrigue mentioned in the comment: none of these are any "better" than you've already done.

To make your solution more functional you need to change your anonymous function to a pure (anonymous) function.
A pure function is a function that, given the same input, will always return the same output
The anonymous function depends on the mutable variable arr1 and arr2. That means that it depends on the system state. So it doesn't fit into the pure function rule.
The following is maybe not the best implementation but I hope it gives you an idea..
Let's Make it Pure
To make it pure we can pass the variables into the function as arguments
// Pure: every value the function depends on arrives as an argument.
const mapWithObject = (obj2, obj1, index) => {
  const itemValue = obj1.value - obj2[index].value;
  return { itemLabel: obj1.name, itemValue };
}
// example call
const result = mapWithObject(arr2, arr1[0], 0)
Ok, but now the function doesn't fit into map anymore because it takes 3 arguments instead of 2..
Let's Curry it
// Curried form: supply obj2 first, get back a (obj1, index) callback with
// the exact signature Array#map expects.
const mapWithObject = obj2 => (obj1, index) => {
  return {
    itemLabel: obj1.name,
    itemValue: obj1.value - obj2[index].value
  };
}
const mapObject_with_arr2 = mapWithObject(arr2)
// example call
const result = mapObject_with_arr2(arr1[0], 0)
Full Code
const arr1 = [{
name: 1,
value: 10
},
{
name: 2,
value: 15
}
]
const arr2 = [{
name: 3,
value: 5
},
{
name: 4,
value: 3
}
]
const mapWithObject = obj2 => (obj1, index) => ({
itemLabel: obj1.name,
itemValue: obj1.value - obj2[index].value
})
const mapObject_with_arr2 = mapWithObject(arr2)
const mappedObject = arr1.map(mapObject_with_arr2)
console.log(mappedObject)

If you don't care to much about performance, but want to separate your concerns a bit further you could use this approach:
Define a function that does the "pairing" between arr1 and arr2
[a, b, c] + [1, 2, 3] -> [ [ a, 1 ], [ b, 2 ], [ c, 3 ] ]
Define a function that clearly shows the merge strategy of two objects
{ a: 1, b: 10 } + { a: 2, b: 20 } -> { a: 1, b: -10 }
Define simple helpers that compose the two so you can pass your original arrays and be returned the desired output in one function call.
Here's an example:
var arr1=[{name:1,value:10},{name:2,value:15}],arr2=[{name:3,value:5},{name:4,value:3}];
// This is a very general method that bundles two
// arrays in an array of pairs. Put it in your utils
// and you can use it everywhere
// Bundles two arrays into an array of index-aligned pairs; slots past the
// end of the shorter array are paired with undefined.
const pairs = (arr1, arr2) => {
  const length = Math.max(arr1.length, arr2.length);
  const result = [];
  for (let i = 0; i < length; i++) {
    result.push([ arr1[i], arr2[i] ]);
  }
  return result;
};
// This defines our merge strategy for two objects.
// Ideally, you should give it a better name, based
// on what the objects represent
// Merge strategy for two objects: keep the first object's name,
// subtract the second object's value.
const merge = (base, ext) => {
  return {
    itemLabel: base.name,
    itemValue: base.value - ext.value
  };
};
// Adapts `merge` to accept a two-element array instead of two arguments.
const mergePair = (pair) => merge(pair[0], pair[1]);
// Composes `pairs` and `mergePair` so the original arrays can be passed
// directly and the merged records returned in one call.
const mergeArrays = (arr1, arr2) => pairs(arr1, arr2).map(mergePair);
console.log(mergeArrays(arr1, arr2));

Related

Functional way to get previous element during map

I have an array which I map over. I need to compare the current element with the previous. I am detecting if the current element is the same as the previous element by comparing their ids and doing something different based on this condition. Is there any purely functional way to do it without doing index math?
items.map((item, index) => {
if(item.id === items[index - 1 > 0 ? index - 1 : 0].id) {
// do something
} else {
// do something else
}
})
The code works but I would like to avoid doing math on the index. Is there any way to do it?
The reduce() function provides a functional way to do what you need:
// NOTE(review): this misuses reduce — after the first iteration,
// `previousValue` is whatever the callback returned (undefined here, since
// there is no return), NOT the previous array element. Without an initial
// value, reduce also throws on an empty array, and the first element is
// never visited as `currentValue`. The mapAdjacent approaches below are
// the reliable way to pair adjacent elements.
items.reduce((previousValue, currentValue) => {
if(currentValue.id === previousValue.id) {
// do something
} else {
// do something else
}
});
Are you sure that you want a map? This sounds like an XY problem. If you want to map over adjacent elements of an array then you'd have to define your own function.
// Maps `mapping` over each adjacent pair of `array`; the result has
// length - 1 entries. Deliberately throws a RangeError for empty input
// (new Array(-1)), as discussed below.
const mapAdjacent = (mapping, array) => {
  const size = array.length - 1;
  const result = new Array(size);
  for (let i = 0; i < size; i++) {
    result[i] = mapping(array[i], array[i + 1]);
  }
  return result;
};
const items = [1, 2, 3, 4, 5];
const result = mapAdjacent((x, y) => [x, y], items);
console.log(result); // [[1, 2], [2, 3], [3, 4], [4, 5]]
Note that this will throw a RangeError if you give it an empty array as input.
// Iterative pairwise map — identical to the version above, repeated here
// for the runnable empty-input demo.
const mapAdjacent = (mapping, array) => {
// size is -1 for an empty array, so new Array(size) throws the RangeError
// shown in the demo below
const {length} = array, size = length - 1, result = new Array(size);
for (let i = 0; i < size; i++) result[i] = mapping(array[i], array[i + 1]);
return result;
};
const items = [];
const result = mapAdjacent((x, y) => [x, y], items); // RangeError: Invalid array length
console.log(result);
I think this is good behaviour because you shouldn't be giving mapAdjacent an empty array to begin with.
Here's a purely functional implementation of mapAdjacent which uses reduceRight. As an added bonus, it works for any iterable object.
// Purely functional pairwise map via a right fold that builds a chain of
// functions, each expecting the element before it. Destructuring makes it
// work for any iterable (arrays, strings).
// For empty input, tail is [] so reduceRight returns the seed `_ => []`,
// which applied to the undefined head yields [] — the behaviour shown in
// the "" example below.
const mapAdjacent = (mapping, [head, ...tail]) =>
// seed: a function that ignores its argument and terminates with [];
// each step prepends mapping(prev, item) and passes item on as the new prev
tail.reduceRight((recur, item) => prev =>
[mapping(prev, item), ...recur(item)]
, _ => [])(head);
const items = "hello";
const result = mapAdjacent((x, y) => [x, y], items);
console.log(result); // [['h', 'e'], ['e', 'l'], ['l', 'l'], ['l', 'o']]
Unlike the iterative version, it returns an empty array instead of throwing an error if you give it an empty array as input.
const mapAdjacent = (mapping, [head, ...tail]) =>
tail.reduceRight((recur, item) => prev =>
[mapping(prev, item), ...recur(item)]
, _ => [])(head);
const items = "";
const result = mapAdjacent((x, y) => [x, y], items);
console.log(result); // []
Note that this is an unintended side effect of array destructuring with rest elements in JavaScript. The equivalent Haskell version does raise an exception.
mapAdjacent :: (a -> a -> b) -> [a] -> [b]
mapAdjacent f (x:xs) = foldr (\y g x -> f x y : g y) (const []) xs x
main :: IO ()
main = do
print $ mapAdjacent (,) "hello" -- [('h','e'),('e','l'),('l','l'),('l','o')]
print $ mapAdjacent (,) "" -- Exception: Non-exhaustive patterns in function mapAdjacent
However, returning an empty array might be desirable for this function. It's equivalent to adding the mapAdjacent f [] = [] case in Haskell.
Not a particularly fast implementation, but destructuring assignment makes it particularly elegant -
// Unique sentinel: destructuring defaults only kick in for missing
// (undefined) elements, so None marks "ran out of input" without clashing
// with any real value.
const None =
Symbol ()
// Recursive pairwise map; stops as soon as fewer than two elements remain,
// so empty and single-element inputs yield [].
const mapAdjacent = (f, [ a = None, b = None, ...more ] = []) =>
a === None || b === None
? []
: [ f (a, b), ...mapAdjacent (f, [ b, ...more ]) ]
const pair = (a, b) =>
[ a, b ]
console.log(mapAdjacent(pair, [ 1, 2, 3 ]))
// [ [ 1, 2 ], [ 2, 3 ] ]
console.log(mapAdjacent(pair, "hello"))
// [ [ h, e ], [ e, l ], [ l, l ], [ l, o ] ]
console.log(mapAdjacent(pair, [ 1 ]))
// []
console.log(mapAdjacent(pair, []))
// []
Or write it as a generator -
// Generator form: lazily yields f(prev, next) for each adjacent pair.
// Works for anything indexable with a length (arrays, strings).
const mapAdjacent = function* (f, iter = [])
{
  for (let i = 0; i + 1 < iter.length; i++)
  {
    yield f (iter[i], iter[i + 1])
  }
}
const pair = (a, b) =>
[ a, b ]
console.log(Array.from(mapAdjacent(pair, [ 1, 2, 3 ])))
// [ [ 1, 2 ], [ 2, 3 ] ]
console.log(Array.from(mapAdjacent(pair, "hello")))
// [ [ h, e ], [ e, l ], [ l, l ], [ l, o ] ]
console.log(Array.from(mapAdjacent(pair, [ 1 ])))
// []
console.log(Array.from(mapAdjacent(pair, [])))
// []
As I mentioned in a comment, I would suggest using reduce. Here is an example:
const input = [
{id: 1, value: "Apple Turnover"},
{id: 1, value: "Apple Turnover"},
{id: 2, value: "Banana Bread"},
{id: 3, value: "Chocolate"},
{id: 3, value: "Chocolate"},
{id: 3, value: "Chocolate"},
{id: 1, value: "Apple"},
{id: 4, value: "Danish"},
];
// Desired output: Array of strings equal to values in the above array,
// but with a prefix string of "New: " or "Repeated: " depending on whether
// the id is repeated or not
// Reducer that tags each item's value with "New: " or "Repeated: "
// depending on whether its id matches the immediately preceding item.
// The accumulator is { previousValue, descriptions }; it starts as null.
const reducer = (accumulator, currentValue) => {
  const descriptions = accumulator ? accumulator.descriptions : [];
  const isRepeatedFromPrevious = accumulator
    ? accumulator.previousValue.id === currentValue.id
    : false;
  // Pushing mutates the descriptions array, but that array is owned by
  // this reducer run, so no outside state is affected.
  const prefix = isRepeatedFromPrevious ? "Repeated: " : "New: ";
  descriptions.push(prefix + currentValue.value);
  return { previousValue: currentValue, descriptions }
};
const output = input.reduce(reducer, null).descriptions;
document.getElementById('output').innerText = JSON.stringify(output);
<output id=output></output>

Collecting indexed statistics from list of objects in JS

So I have a list of objects, ex.
var data = [{a: 'data1', b: 'subdata1'}, {a: 'data2', b: 'subdata2'}, {a: 'data1', b: 'subdata3'}, {a: 'data1', b: 'subdata1'}]
(note the objects have other attributes too)
I'm looking to extract some condensed details of this list with a result:
[{type: 'data1', list: [{subtype: 'subdata1', count: 2}, {subtype: 'subdata3', count: 1}]}, {type: 'data2', list: [{subtype: 'subdata2', count: 1}]}]
I have been able to count the type (or subtype) with reduce:
data.reduce((lst, item) => { lst[item.type] = lst[item.type] + 1 || 1; return lst; }, {});
but, this isn't exactly the structure or complete detail I'm looking to achieve. I can obviously do the work manually with a for loop, but I'm hoping to understand map, reduce, etc. better for a cleaner/simpler implementation.
You could create a nested lookup table, (a -> b -> count), then you can iterate over that and build the result:
// Build a nested tally: table[a][b] = number of occurrences of that (a, b)
// combination in `data` (the array from the question).
const table = {};
for(const { a, b } of data) {
if(!table[a]) table[a] = {};
if(!table[a][b]) table[a][b] = 0;
table[a][b]++;
}
// Reshape the lookup table into the requested
// [{ type, list: [{ subtype, count }] }] structure.
const result = Object.entries(table)
.map(([type, entries]) => ({ type, list: Object.entries(entries).map(([ subtype, count ]) => ({ subtype, count })), }));
Yes, one could write that as a functional chain:
// NOTE(review): the original was parenthesized incorrectly — the arrow
// body ended at `+ 1`, making reduce's first argument the comma expression
// `(fn, table)`, which evaluated the out-of-scope name `table` and threw a
// ReferenceError. The comma pair must sit INSIDE the arrow's body so the
// accumulator is returned from each step.
const result = Object.entries(
data.reduce(
(table, { a, b }) => ((table[a] || (table[a] = {}))[b] = (table[a][b] || 0) + 1, table),
{}
)
).map(([type, entries]) => ({
type,
list: Object.entries(entries).map(([ subtype, count ]) => ({ subtype, count })),
}));
But IMO that's less readable.

javascript: Using lodash/fp flow to return objects

I am converting a _.chain group of functions to use _fp.flow, but am having some difficulty dealing with the way flow curries complex objects. I am trying to
Reduce an array of objects with some grouped function (e.g. countBy/sumBy) into a object/dictionary (e.g. { group1:10, group2:15... } )
Map it into an array of key/value pairs (e.g. [{column: 'group1', value: '10'}, ...])
Sort by some variable into asc/desc order
but right now the resulting object ends up being flattened into a long array. A sample of the code is below. The reducer function in the code below is working correctly and grouping the values as I intended, but then I think the currying between the each step and orderBy is flattening the object somehow (the desired object is formed correctly after _.each in the console.log.
I've put a sample of the code in the attached JSFiddle.
const inData = [{
target: 123,
groupby: 'a'
},...
}];
const colData = _.flow(
_.reduce(reducer, {}),
_.toPairs,
_.each(([value, column]) => {
console.log(value);
console.log(column);
const outObj = {
value: value,
column: column
}
console.log(outObj)
return (outObj);
}),
_.orderBy(['value'], [sortDir]),
// Have tried result with or without fromPairs
_.fromPairs
)(inData);
PS: I am using ES6 syntax and React in my main project, if that makes a difference.
https://jsfiddle.net/moc0L5ac/
You need to use map instead of each and also fix the order of [value, column] to [column, value]
// lodash/fp pipeline: accumulate with `reducer`, convert the resulting
// object to [key, value] pairs, shape each pair into a { column, value }
// record, then sort. `reducer`, `sortDir` and `inData` are defined
// elsewhere in the question's fiddle.
const colData = _.flow(
_.reduce(reducer, {}),
// toPairs yields [key, value] — destructure in that order (the question
// had [value, column] swapped)
_.toPairs,
// map (not each!) so the shaped records flow on to the next step
_.map(([column, value]) => {
const outObj = {
value: value,
column: column
}
return outObj;
}),
_.orderBy(['value'], [sortDir])
)(inData);
To the best my understanding, this is what you're looking to accomplish
const inData =
[ { target: 123, groupby: 'a' },
{ target: -123, groupby: 'b' },
{ target: 123, groupby: 'a' },
{ target: -123, groupby: 'b' } ]
const colData = _.flow(
_.reduce((map, {target:v, groupby:k}) =>
Object.assign(map, { [k]: map[k] === undefined ? v : map[k] + v }), {}),
_.toPairs,
_.map(([column, value]) => ({ column, value }))
)
console.log(colData(inData));
// => [ { column: 'a', value: 246 },
// { column: 'b', value: -246 } ]

ramdajs: locating items with an inner array that satisfies a spec

Given a structure like this:
[
{
documentType: { id: 4001 }
correspondence: [ { id: 1000 }, { id: 1010 } ]
},
{
documentType: { id: 102 }
correspondence: [ { id: 1000 } ]
},
{
documentType: { id: 101 }
correspondence: [ { id: 1001 } ]
}
]
I am trying to use ramda to find the indexes of the array where the inner correspondence array contains 1000.
I have tried this:
R.filter(R.where({ correspondence: R.any(R.where({ id: 1000 }))}))(data)
First you'll want a slight tweak to your predicate function, changing the inner R.where to R.propEq to allow comparison against a constant value rather than a function:
const pred = R.where({ correspondence: R.any(R.propEq('id', 1000))})
Then I have two examples of how you could approach this, both making use of R.addIndex to capture the index:
One using R.reduce to build up a list while testing each element:
const reduceWithIdx = R.addIndex(R.reduce)
const fn = reduceWithIdx((acc, x, i) => pred(x) ? R.append(i, acc) : acc, [])
fn(data) //=> [0, 1]
The second using R.map to embed the index in each element before filtering:
const mapWithIdx = R.addIndex(R.map)
const fn = R.pipe(
mapWithIdx(R.flip(R.assoc('idx'))),
R.filter(pred),
R.map(R.prop('idx'))
)
fn(data) //=> [0, 1]

How to merge two dictionaries in javascript

I have two arrays of objects:
Array1:
var myArr1 = [];
myArr1["1"]={any:1,some:1};
myArr1["2"]={any:2,some:2};
myArr1["3"]={any:3,some:3};
Array2:
var myArr2 = [];
myArr2["1"]={other:1};
myArr2["2"]={other:2};
And I want them to be merged by their keys into a new Attribute, so the result will be:
[
{any:1,some:1,myNewAttribute:{other:1}},
{any:2,some:2,myNewAttribute:{other:2}},
{any:3,some:3,myNewAttribute:{other:3}}
]
I tried to achieve it with lodash's _.merge() but I failed miserably. _.merge only adds the second array after the first, but does not match their keys / ids.
You could map the second array to a new property and merge later.
With lodash
var data1 = [{ any: 1, some: 1 }, { any: 2, some: 2 }, { any: 3, some: 3 }],
data2 = [{ other: 1 }, { other: 2 }, { other: 3 }];
console.log(_.merge(data1, _.map(data2, x => ({ myNewAttribute: x }))));
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.15.0/lodash.min.js"></script>
With ES6, without lodash
// Pair each element of data1 with the same-index element of data2,
// attaching the latter under `myNewAttribute` (the inputs stay untouched).
var data1 = [{ any: 1, some: 1 }, { any: 2, some: 2 }, { any: 3, some: 3 }];
var data2 = [{ other: 1 }, { other: 2 }, { other: 3 }];
console.log(data1.map((a, i) => ({ ...a, myNewAttribute: data2[i] })));
You don't need lodash:
myArr1.map((e1, idx) => Object.assign({}, e1, {myNewAttribute: myArr2[idx]}))
You could get fancy and write a little function called map2, which takes two arrays, and invokes a callback with the two elements:
/**
 * Maps over two arrays in parallel, invoking fn(elt1, elt2) for each
 * index-aligned pair of elements.
 *
 * Fix: the original was missing the closing parenthesis of the `map`
 * call (`...fn(elt, a2[idx]);`) — a SyntaxError.
 */
function map2(a1, a2, fn) {
  return a1.map((elt, idx) => fn(elt, a2[idx]));
}
Now you can write the solution as
map2(myArr1, myArr2, (e1, e2) => Object.assign({}, e1, {myNewAttribute: e2}))
From the perspective of program design, what we are doing here is "separating concerns". The first concern is the abstract operation of looping over two arrays in parallel and doing something with each pair of elements. That is what is represented by map2. The second concern is the specific way you want to combine the elements. That is what is represented by the function we are passing to map2. This could be made clearer and somewhat self-documenting by writing it separately:
// Combines a pair of elements: a shallow copy of e1 with e2 attached
// under `myNewAttribute`. Neither input is mutated.
function combineObjects(e1, e2) {
  return { ...e1, myNewAttribute: e2 };
}
map2(myArr1, myArr2, combineObjects);
Of course, in the real world, you'd want to handle the case where the two arrays were of different length, pass an index to the callback as a third parameter for use if necessary, support a third thisArg-type parameter analogous to map, etc.
You can do like this:
var first = [{any:1,some:1},{any:2,some:2},{any:3,some:3}];
var second = [{other:1},{other:2},{other:3}];
for(var i = 0; i < first.length; i++){
if(first[i] && second[i]){
first[i]['mycustomitem'] = second[i];
}
}
console.log(first);
In order to prove, what I did comment 30 minutes ago -
How to merge two dictionaries in javascript -
there is a possible reduce approach ...
... firstly provided as lodash based example ...
var
myArr1 = [
{any: 1, some: 1},
{any: 2, some: 2},
{any: 3, some: 3}
],
myArr2 = [
{other: 1},
{other: 2}
],
mergedObjectList = _.reduce(myArr1, function (collector, item_1, idx) {
var
item_2 = collector[idx],
merger = _.assign({}, item_1, item_2);
// or whatever one wants to do to `merger` with `myNewAttribute`
collector[idx] = merger;
return collector;
}, _.clone(myArr2));
console.log("myArr1 : ", myArr1);
console.log("myArr2 : ", myArr2);
console.log("mergedObjectList : ", mergedObjectList);
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.15.0/lodash.min.js"></script>
... and secondly as language core only based example ...
// Core-JS-only variant: seed the accumulator with a copy of myArr2, then
// fold myArr1 into it, merging index-aligned entries (the myArr2 entry's
// keys win on clashes, matching Object.assign order).
var myArr1 = [
  { any: 1, some: 1 },
  { any: 2, some: 2 },
  { any: 3, some: 3 }
];
var myArr2 = [
  { other: 1 },
  { other: 2 }
];
var mergedObjectList = myArr1.reduce(function (collector, item, idx) {
  // or whatever one wants to do to the merged entry with `myNewAttribute`
  collector[idx] = Object.assign({}, item, collector[idx]);
  return collector;
}, Array.from(myArr2));
console.log("myArr1 : ", myArr1);
console.log("myArr2 : ", myArr2);
console.log("mergedObjectList : ", mergedObjectList);
Try this function:
/**
 * Merges two sparse "dictionaries" (arrays used with string keys) into a
 * NEW dictionary. Keys present in both have their entries' properties
 * combined, with _dctn2's properties winning on clashes.
 *
 * Fixes over the original: entries are shallow-copied instead of aliased,
 * so the objects inside _dctn1/_dctn2 are no longer mutated through the
 * result; the loose `== undefined` check is now strict.
 */
function mergeDictionary(_dctn1,_dctn2)
{
  var newDict = [];
  for(var i in _dctn1)
  {
    // copy, don't alias — otherwise the merge loop below would write into
    // _dctn1's own objects
    newDict[i] = Object.assign({}, _dctn1[i]);
  }
  for(var j in _dctn2)
  {
    if(newDict[j] === undefined)
    {
      // key only in _dctn2: copy its entry as well
      newDict[j] = Object.assign({}, _dctn2[j]);
    }
    else
    {
      for(var k in _dctn2[j])
      {
        newDict[j][k] = _dctn2[j][k];
      }
    }
  }
  return newDict;
}
var myArr1 = [];
myArr1["1"]={any:1,some:1};
myArr1["2"]={any:2,some:2};
myArr1["3"]={any:3,some:3};
var myArr2 = [];
myArr2["1"]={other:1};
myArr2["2"]={other:2};
console.log(mergeDictionary(myArr1, myArr2));

Categories

Resources