Concatenate two arrays of objects and remove repeated data from an attribute - javascript

I have 2 arrays of objects. One array receives the data from the API and renders it in the application; the other receives data from localStorage, which is data from the first array that has been changed and stored in localStorage. I want to concatenate these two arrays, but I need to remove the repeated data so I don't render the same object twice.
Example of what I hope for:

dado1 = [
  { customer: { purchased: false, id: 1 } },
  { customer: { purchased: false, id: 2 } }
]
dado2 = [
  { customer: { purchased: true, id: 1 } }
]
dado3 = dado1.concat(dado2)

Result:

dado3 = [
  { customer: { purchased: true, id: 1 } },
  { customer: { purchased: false, id: 2 } }
]
I am not able to compare the two arrays; I've tried several ways, but I always fail.

Something like this?
const dado1 = [
  { customer: { purchased: false, id: 1 } },
  { customer: { purchased: false, id: 2 } }
];
const dado2 = [
  { customer: { purchased: true, id: 1 } }
];

const dado3 = dado1.map(el => {
  const nv = dado2.find(x => x.customer.id === el.customer.id);
  return nv ? nv : el;
});

console.log(dado3);
But be careful: this is an array of objects that themselves contain objects, so prefer to use
const dado3 = dado1.map(el => {
  const nv = dado2.find(x => x.customer.id === el.customer.id);
  // the JSON round-trip produces deep copies of the nested customer objects
  return nv ? JSON.parse(JSON.stringify(nv)) : JSON.parse(JSON.stringify(el));
});
if you want a real new array (deep copies instead of references to the original objects).
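As a side note, in modern runtimes (recent browsers, Node 17+) structuredClone gives a deep copy without the JSON round-trip. A minimal sketch of the same mapping, assuming the data only contains plain, cloneable values:

const dado3 = dado1.map(el => {
  const nv = dado2.find(x => x.customer.id === el.customer.id);
  // structuredClone deep-copies the nested customer object
  return structuredClone(nv ?? el);
});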

You can make your customer objects unique by mapping them by the id field and ensuring that entries from the second array take precedence (this will prefer the localStorage data over the API results):
let mergeCustomerData = (arr1, arr2) => {
  // Both arrays are converted to Maps, where the `item.customer.id` property determines the key
  [arr1, arr2] = [arr1, arr2].map(arr => new Map(arr.map(v => [v.customer.id, v])));
  // Merge these Maps into one (preference determined by the order of `arr1` and `arr2`)
  let merged = new Map([...arr1, ...arr2]);
  // Return the merged values, converted back to an Array
  return [...merged].map(([id, v]) => v);
};
let dado1 = [
  { customer: { purchased: false, id: 1 } },
  { customer: { purchased: false, id: 2 } }
];
let dado2 = [
  { customer: { purchased: true, id: 1 } }
];

console.log(mergeCustomerData(dado1, dado2));

Collect the ids from the second array. Take as the result the second array plus all elements from the first array whose id is not among the collected ids.
dado1 = [
  { customer: { purchased: false, id: 1 } },
  { customer: { purchased: false, id: 2 } }
];
dado2 = [
  { customer: { purchased: true, id: 1 } },
  { customer: { purchased: true, id: 5 } }
];
function concatDado(dado1, dado2) {
  let result = dado2;
  let ids = [];
  dado2.forEach(element => ids.push(element.customer.id));
  dado1.forEach(element => {
    const id = element.customer.id;
    if (ids.indexOf(id) === -1)
      result.push(element);
  });
  return result;
}

console.log(concatDado(dado1, dado2));
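Note that result = dado2 means the push calls also mutate dado2 itself. If that matters, a sketch of a non-mutating variant of the same idea (the name concatDadoImmutable is just for illustration) could look like this:

function concatDadoImmutable(dado1, dado2) {
  // a Set of the ids already present in dado2
  const ids = new Set(dado2.map(element => element.customer.id));
  // start from a copy of dado2 and only append the unmatched elements of dado1
  return [...dado2, ...dado1.filter(element => !ids.has(element.customer.id))];
}

console.log(concatDadoImmutable(dado1, dado2));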

Related

Joining two arrays using a common property javascript

What I want to do is basically join the information from two arrays via userId. The solution I came up with works only when there is little data; with very large arrays, this huge amount of filter calls is very impractical. Can anyone think of a more efficient solution?
PS: I'm using > 0 ? because sometimes one of the properties is empty.
const data01 = [
  { userId: 0, motorcycles: 'motorcycle01', cars: 'car01' },
  { userId: 1, motorcycles: 'motorcycle02', cars: 'car02' },
  { userId: 2, cars: 'car03' },
  { userId: 3, motorcycles: 'motorcycle04' },
]

items.forEach(
  a =>
    (
      a.motorcycles = data01.filter(b => b.userId === a.userId).length > 0 ? data01.filter(b => b.userId === a.userId)[0].motorcycles : null,
      a.cars = data01.filter(b => b.userId === a.userId).length > 0 ? data01.filter(b => b.userId === a.userId)[0].cars : null
    )
);

console.log(items)
Expected Output:

[
  {
    ...
    motorcycles: 'motorcycle01',
    cars: 'cars01'
  },
  {
    ...
    motorcycles: 'motorcycle01',
    cars: 'cars01'
  }
]
You can speed up the process by creating a Map of data01, keyed by userId.
And with Object.assign you can copy the properties from a match. This will not create the property if it doesn't exist in the source, so there will be no null assignments (unless the source has an explicit null):
let map = new Map(data01.map(o => [o.userId, o]));
items.forEach(a => Object.assign(a, map.get(a.userId)));
If you are only interested in a selection of properties, then create objects that only have those properties:
let map = new Map(data01.map(o =>
  [o.userId, { cars: o.cars, motorcycles: o.motorcycles }]
));
items.forEach(a => Object.assign(a, map.get(a.userId)));
This second solution will always create the specified properties, even when they didn't exist yet; in that case their values will be undefined.
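A tiny illustration of that difference, using the user above that has no motorcycles property (a sketch, not part of the original answer):

const source = { userId: 2, cars: 'car03' }; // no motorcycles on this user

// Variant 1: assign the matched object as-is -> the missing key stays absent
console.log(Object.assign({}, source));
// { userId: 2, cars: 'car03' }

// Variant 2: pick the properties up front -> the key always exists, possibly undefined
console.log(Object.assign({}, { cars: source.cars, motorcycles: source.motorcycles }));
// { cars: 'car03', motorcycles: undefined }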
If your arrays are arrays of objects and you need to consolidate two or more of them based on some property in the objects, the simplest approach is to build an intermediate Map whose keys are the userIds, and then update that Map non-destructively as you iterate through the arrays.
const data01 = [
  { userId: 0, motorcycles: 'motorcycle01', cars: 'car01' },
  { userId: 1, motorcycles: 'motorcycle02', cars: 'car02' },
  { userId: 2, cars: 'car03' },
  { userId: 3, motorcycles: 'motorcycle04' },
]
const data02 = [
  { userId: 0, dogs: 'doggy', cats: 'cat01' },
  { userId: 1, dogs: 'doggo', cats: 'cat02' },
  { userId: 2, dogs: 'shepherd' },
  { userId: 3, cats: 'kitty' },
]

function combineArrFromUserId(arr1, arr2) {
  const idMap = new Map()
  arr1.forEach(item => checkAndAdd(item, idMap))
  arr2.forEach(item => checkAndAdd(item, idMap))
  // Map#values() returns an iterator, so spread it into an array
  return [...idMap.values()]
}

function checkAndAdd(item, map) {
  const current = map.get(item.userId)
  if (current) {
    // merge the new properties into the entry already stored for this userId
    map.set(item.userId, Object.assign(current, item))
  } else {
    map.set(item.userId, item)
  }
}

console.log(combineArrFromUserId(data01, data02))

Using indexOf in order to filter an array - (Next.js app with TypeScript)

I have this array in my Next.js app:

arr1
[
  {
    identifier: "60a17722225f2918c445fd19",
    name: "Ben Awad",
    _id: "60c94480b8d43c28d0a6eb73"
  },
  {
    identifier: "60a455d11fa62a1510b408f8",
    name: "dev ed",
    _id: "60bf62cede309f1a30fe88ab"
  }
]
And I have this other, bigger array:

arr2
[
  {
    name: "Ben Awad",
    _id: "60a17722225f2918c445fd19"
  },
  {
    name: "dev ed",
    _id: "60a455d11fa62a1510b408f8"
  },
  {
    name: "Katlyn",
    _id: "60a52500ce96f30c14fdaff9"
  },
  {
    name: "Mike",
    _id: "60c95deeb8d43c28d0a6eb74"
  },
  {
    name: "Kassandra",
    _id: "60c960ddb8d43c28d0a6eb7a"
  }
]
I want a new array that has all the users except those whose ids already appear in arr1.
So this is the logic I tried (notice that arr1 and arr2 will change constantly):
Me = arr1
AllUsers = arr2

const LookFriends =
  Me &&
  AllUsers.filter(({ _id }) => {
    return Me.friends.indexOf(_id) === -1;
  });

console.log(LookFriends);
The output should be Katlyn, Mike and Kassandra, but the console.log says...
[
  {
    name: "Ben Awad",
    _id: "60a17722225f2918c445fd19"
  },
  {
    name: "dev ed",
    _id: "60a455d11fa62a1510b408f8"
  },
  {
    name: "Katlyn",
    _id: "60a52500ce96f30c14fdaff9"
  },
  {
    name: "Mike",
    _id: "60c95deeb8d43c28d0a6eb74"
  },
  {
    name: "Kassandra",
    _id: "60c960ddb8d43c28d0a6eb7a"
  }
]
I'm really having a hard time trying to filter an array based on another array. What can I do?
You need to use findIndex in this case, so you can compare the id fields themselves (the identifier stored in arr1 against each user's _id):

const LookFriends =
  Me &&
  AllUsers.filter(({ _id }) => {
    return Me.friends.findIndex(friend => friend.identifier === _id) === -1;
  });
If you use "indexOf", it will compare the entire object to just that _id value.
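A minimal illustration of the difference (the ids are made up): indexOf checks whole elements with strict equality, while findIndex lets you test the one field you care about.

const friends = [{ identifier: 'a1', _id: 'f1' }];

console.log(friends.indexOf('a1'));                                   // -1: a string never equals an object
console.log(friends.findIndex(friend => friend.identifier === 'a1')); // 0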
You can extract the identifiers from the first array and keep only the elements of the second array whose _id doesn't appear in that lookup, using either a plain object or a Set:
// with an object
const ids = {}
arr1.forEach(user => {
  ids[user.identifier] = true
})
const filtered = arr2.filter(user => !ids[user._id])

// or with a Set
const ids = new Set(arr1.map(user => user.identifier))
const filtered = arr2.filter(user => !ids.has(user._id))
Please have a try with this code:

Me &&
  AllUsers.filter(({ _id }) => {
    let bExist = false
    Me.friends.forEach((friend) => {
      if (friend.identifier === _id)
        bExist = true
    })
    // keep only the users that are not already friends
    return !bExist
  });
Let me know if it works or not.
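The same check is more concise with Array.prototype.some, which stops as soon as a match is found; a sketch along the same lines, assuming the same Me.friends shape as above:

const LookFriends =
  Me &&
  AllUsers.filter(({ _id }) => !Me.friends.some(friend => friend.identifier === _id));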

Merging two arrays of objects with one being used as a master overwriting any duplicates

I have two arrays of objects. Each object within these arrays has its own array of option objects.
I'm trying to merge the two arrays, with one being used as a master that overwrites any duplicates on both the first level and the second 'options' level. Almost like a union join.
I've tried the code below; however, it doesn't cater for duplicates in the options within a material.
Running this code results in two id: 400 options for the second material, when there should only be one, with the value of 100cm.
Is there any smart way of doing this please? I also had a look at using sets, but again this only worked on the top level.
const materials_list = [
  {
    id: 2,
    options: [
      { id: 300, value: '50cm' },
      { id: 400, value: '75cm' }
    ]
  }
]

const master_materials_list = [
  {
    id: 1,
    options: [
      { id: 200, value: '50cm' }
    ]
  },
  {
    id: 2,
    options: [
      { id: 400, value: '100cm' }
    ]
  }
]
master_materials_list.forEach(masterMaterial => {
  const matchMaterial = materials_list.find(existingMaterial => existingMaterial.id === masterMaterial.id);
  if (matchMaterial) {
    masterMaterial.options = masterMaterial?.options.concat(matchMaterial.options);
  }
});

console.log(master_materials_list);
This is the desired output:

[
  {
    id: 1,
    options: [
      { id: 200, value: '50cm' }
    ]
  },
  {
    id: 2,
    options: [
      { id: 300, value: '50cm' },
      { id: 400, value: '100cm' }
    ]
  }
]
A different approach that first makes a Map of the materials_list entries for O(1) lookup.
Then, when mapping the master list, use filter() to find options stored in the above Map that don't already exist in the master:
const materials_list = [{ id: 2, options: [{ id: 300, value: "50cm" }, { id: 400, value: "75cm" }] }, { id: 999, options: [] }],
      master_materials_list = [{ id: 1, options: [{ id: 200, value: "50cm" }] }, { id: 2, options: [{ id: 400, value: "100cm" }] }];

// store the material list entries in a Map keyed by material id
const listMap = new Map(materials_list.map(o => [o.id, o]));
// used to track ids found in the master list
const masterIDs = new Set()

// map the master list and return new objects to prevent mutation of the originals
const res = master_materials_list.map(({ id, options, ...rest }) => {
  // track this id
  masterIDs.add(id)
  // no need to search if the material list Map doesn't have this id
  if (listMap.has(id)) {
    // Set of option ids already in this master entry
    const opIds = new Set(options.map(({ id }) => id));
    // filter the options in the Map entry for any that don't already exist in the master
    const newOpts = listMap.get(id).options.filter(({ id }) => !opIds.has(id));
    // and merge them
    options = [...options, ...newOpts]
  }
  // return the new object
  return { id, options, ...rest };
});

// add material list items not found in the master to the results
listMap.forEach((v, k) => {
  if (!masterIDs.has(k)) {
    res.push({ ...v })
  }
})

console.log(res)
You can do this with lodash:
const materials_list = [
  {
    id: 2,
    options: [
      { id: 300, value: '50cm' },
      { id: 400, value: '75cm' },
    ],
  },
];

const master_materials_list = [
  {
    id: 1,
    options: [
      { id: 200, value: '50cm' },
    ],
  },
  {
    id: 2,
    options: [
      { id: 400, value: '100cm' },
    ],
  },
];
const customizer = (objValue, srcValue, propertyName) => {
  if (propertyName === 'options') {
    return _(srcValue)
      .keyBy('id')
      .mergeWith(_.keyBy(objValue, 'id'))
      .values()
      .value();
  }
};

const merged = _(master_materials_list)
  .keyBy('id')
  .mergeWith(_.keyBy(materials_list, 'id'), customizer)
  .values()
  .value();

console.log(merged);
<script src="https://cdn.jsdelivr.net/npm/lodash@4.17.21/lodash.min.js"></script>
You’re going to have to filter matchMaterial.options before the concat. Something like:

matchMaterial.options = matchMaterial.options.filter(opt =>
  masterMaterial.options.find(val => val.id === opt.id) == null
);
This should remove any “duplicate” options from matchMaterial before the concat.
EDIT:
I did this on my phone so I’m sorry if the code is formatted weird like I’m seeing now
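For completeness, a sketch of the question's original forEach with that filter folded in (using some for the duplicate check) might look like this:

master_materials_list.forEach(masterMaterial => {
  const matchMaterial = materials_list.find(existingMaterial => existingMaterial.id === masterMaterial.id);
  if (matchMaterial) {
    // keep only the options whose id is not already present on the master entry
    const extraOptions = matchMaterial.options.filter(opt =>
      !masterMaterial.options.some(val => val.id === opt.id)
    );
    masterMaterial.options = masterMaterial.options.concat(extraOptions);
  }
});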

Javascript - Get a corresponding object between two arrays

I have the following arrays:
First array:
const dummyJSON = [
  {
    id: 1,
    sponsor_date: '2020-08-16T22:45:03.154Z'
  },
  {
    id: 2,
    sponsor_date: '2020-09-16T22:45:03.154Z'
  },
  {
    id: 3,
    sponsor_date: '2020-09-01T22:45:03.154Z'
  }
]
Second array:
const validated = [ true, false, false ]
And I want to get the object's id (dummyJSON.id) when the corresponding item in the validated array is true.
Basically, if the first item validated[0] has the value true, then I would like to get the id value of the corresponding item dummyJSON[0].
You can use Array#reduce to get an array of the validated ids.
It loops over every element, and if the value in validated at the same index is truthy, the object's id is pushed onto the result.
const dummyJSON = [
{ id: 1, sponsor_date: '2020-08-16T22:45:03.154Z' },
{ id: 2, sponsor_date: '2020-09-16T22:45:03.154Z' },
{ id: 3, sponsor_date: '2020-09-01T22:45:03.154Z' }
];
const validated = [true, false, false];
const validatedIds = dummyJSON
.reduce((s, { id }, i) => (validated[i] ? s.push(id) : s, s), []);
console.log(validatedIds);
If your goal is just to get the validated items, use filter:
const valid = dummyJSON.filter((item, index) => validated[index]);
If you just want the ids, add a map call:
const valid = dummyJSON.filter((item, index) => validated[index]);
const ids = valid.map(x => x.id);
This could be done in a single line if you prefer, by chaining the map call:
const ids = dummyJSON.filter((item, index) => validated[index]).map(x => x.id);
const dummyJSON = [
{ id: 1, sponsor_date: '2020-08-16T22:45:03.154Z' },
{ id: 2, sponsor_date: '2020-09-16T22:45:03.154Z' },
{ id: 3, sponsor_date: '2020-09-01T22:45:03.154Z' }
];
const validated = [ true, false, false ];
// source objects
console.log(dummyJSON.filter((_, index) => validated[index]));
// just the ids
console.log(dummyJSON.filter((_, index) => validated[index]).map(x => x.id));
No need for reduce; filter can do that just as well, and faster:
const validated = [ true, false, false ]
const dummyJSON = [
  {
    id: 1,
    sponsor_date: '2020-08-16T22:45:03.154Z'
  },
  {
    id: 2,
    sponsor_date: '2020-09-16T22:45:03.154Z'
  },
  {
    id: 3,
    sponsor_date: '2020-09-01T22:45:03.154Z'
  }
]

// To get all validated objects from dummyJSON
const validatedJSON = dummyJSON.filter((obj, index) => validated[index])

// To extract just the ids
const validatedJSONIds = validatedJSON.map(json => json.id)

Get array of all unique object values based on property name

How can I get an array with all the unique values based on a property name?
In my case my object looks like this, and I want an array with the unique documentIDs.
const file = {
  invoice: {
    invoiceID: 1,
    documentID: 5
  },
  reminders: [
    {
      reminderID: 1,
      documentID: 1
    },
    {
      reminderID: 2,
      documentID: 1
    }
  ]
}
The result should be the array [5, 1] // the unique documentIDs are 5 and 1
It doesn't seem possible to pass a property name to the Object.values() function.
You can use a Set to collect the unique documentIDs:
const file = {
  invoice: {
    invoiceID: 1,
    documentID: 5
  },
  reminders: [
    {
      reminderID: 1,
      documentID: 1
    },
    {
      reminderID: 2,
      documentID: 1
    }
  ],
  payments: {
    documentID: 5
  }
};

// for each top-level value: map over arrays, otherwise read documentID directly
var keys = Object.keys(file).map(key => file[key].map ? file[key].map(i => i.documentID) : file[key].documentID)
var keysFlattened = [].concat.apply([], keys);
var unique = new Set(keysFlattened);

console.log(Array.from(unique));
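If flatMap is available (ES2019+), roughly the same idea can be written more directly. A sketch, assuming every top-level value of file is either an object with a documentID or an array of such objects:

const ids = [...new Set(
  Object.values(file).flatMap(value =>
    Array.isArray(value) ? value.map(item => item.documentID) : value.documentID
  )
)];

console.log(ids); // [5, 1]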
I use something like this, which does what you want, I think:
const keepUniqueBy = key => (array, item) => {
  if (array.find(i => item[key] === i[key])) {
    return array;
  } else {
    return [...array, item];
  }
};
Then you can simply do: const unique = reminders.reduce(keepUniqueBy('documentID'), []) (the empty array as the initial value matters, because the reducer expects an array accumulator).
NB: It's probably not the fastest approach, but for small arrays it doesn't matter.
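A usage sketch against the file object from the question, combining the invoice with the reminders and then mapping down to the ids:

const unique = [file.invoice, ...file.reminders].reduce(keepUniqueBy('documentID'), []);
const ids = unique.map(item => item.documentID);

console.log(ids); // [5, 1]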
