I'm trying to extract all link IDs from the object array shown below. This is how I was trying to get them:
const linkIDs = array
.filter(d => d.links)
.map(d => d.links)
But this gives me a nested array, which is not what I wanted.
[
{
"id: "1",
"links": [
{
"id" : "Dn59y87PGhkJXpaiZ",
"type" : "article"
},
{
"id" : "PGhkJXDn59y87paiZ",
"type" : "article"
}
]
},
{
"id: "2",
"links": [
{
"id" : "GhkJXpaiZDn59y87P",
"type" : "article"
}
]
},
{
"id": "3"
}
]
So in this example I need the result
[ "Dn59y87PGhkJXpaiZ", "PGhkJXDn59y87paiZ", "GhkJXpaiZDn59y87P" ]
You can do it like below, without using any other library.
var data = [
{
"id": "1",
"links": [
{
"id" : "Dn59y87PGhkJXpaiZ",
"type" : "article"
},
{
"id" : "PGhkJXDn59y87paiZ",
"type" : "article"
}
]
},
{
"id": "2",
"links": [
{
"id" : "GhkJXpaiZDn59y87P",
"type" : "article"
}
]
},
{
"id": "3"
}
];
var result = data.filter(e => e.links)
.map(e => e.links.map(link => link.id))
.reduce((a, b) => a.concat(b), []);
console.log(result);
I propose a more readable syntax in plain JS:
var data = [
{
"id": "1",
"links": [
{
"id" : "Dn59y87PGhkJXpaiZ",
"type" : "article"
},
{
"id" : "PGhkJXDn59y87paiZ",
"type" : "article"
}
]
},
{
"id": "2",
"links": [
{
"id" : "GhkJXpaiZDn59y87P",
"type" : "article"
}
]
},
{
"id": "3"
}
];
var result = data.filter(e => e.links)
.map(e => e.links)
.flat()
.map(e => e.id)
console.log(result);
You need to build a flat array of links before mapping to the IDs. reduce in JS is a very useful function ;)
var arr = [
{
"id": "1",
"links": [
{
"id" : "Dn59y87PGhkJXpaiZ",
"type" : "article"
},
{
"id" : "PGhkJXDn59y87paiZ",
"type" : "article"
}
]
},
{
"id": "2",
"links": [
{
"id" : "GhkJXpaiZDn59y87P",
"type" : "article"
}
]
},
{
"id": "3"
}
];
var result = arr.filter(a => a.links)
.reduce((acc, a) => acc.concat(a.links), [])
.map(a => a.id);
console.log(result);
You can use lodash's flatMap(), where each item's links are transformed using map().
var data = [
{
"id": 1,
"links": [
{
"id": "Dn59y87PGhkJXpaiZ",
"type": "article"
},
{
"id": "PGhkJXDn59y87paiZ",
"type": "article"
}
]
},
{
"id": "2",
"links": [
{
"id": "GhkJXpaiZDn59y87P",
"type": "article"
}
]
},
{
"id": "3"
}
];
var result = _.flatMap(data, item =>
_(item.links)
.map(v => (v.id))
.value()
);
console.log(result);
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.12.0/lodash.js"></script>
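On a modern engine (ES2019+), the same result is also available without lodash via the native Array.prototype.flatMap. A minimal sketch using the data array from the snippets above:
const ids = data
  .filter(d => d.links)                         // skip objects without a links array
  .flatMap(d => d.links.map(link => link.id));  // map to IDs and flatten one level
console.log(ids); // [ "Dn59y87PGhkJXpaiZ", "PGhkJXDn59y87paiZ", "GhkJXpaiZDn59y87P" ]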
I have an object that looks something like this (it is an Avro Schema):
{
"type": "record",
"namespace": "company.car.v1",
"name": "CarV1",
"fields": [
{
"name": "plateNumber",
"type": "string"
},
{
"name": "ownerId",
"type": "string"
},
{
"name" : "details",
"type" : {
"type" : "record",
"name" : "DetailsV1",
"fields" : [
{
"name": "engine",
"type": {
"type": "record",
"name": "EngineV1",
"fields": [
{
"name": "size",
"type": "int",
"default": 0
},
{
"name": "valvesCount",
"type": "int",
"default": 0
}
]
}
},
{
"name" : "color",
"type" : "string",
"default" : "NONE"
},
{
"name" : "rimSize",
"type" : "int",
"default" : "NONE"
}
]},
"default" : {}
},
{
"name": "isBrandNew",
"type": "boolean"
}
]
}
My main goal is to have a function that takes such an object as input, extracts only some particular fields, and produces a subset of this schema.
So, a function that looks something like reduceSchema(avroSchema, [paths]).
For example, the call:
reduceSchema(avroSchemaOnTop, ['ownerId', 'details.engine.size'])
And then this would produce the desired output:
{
"type":"record",
"namespace":"company.car.v1",
"name":"CarV1",
"fields":[
{
"name":"ownerId",
"type":"string"
},
{
"name":"details",
"type":{
"type":"record",
"name":"DetailsV1",
"fields":[
{
"name":"engine",
"type":{
"type":"record",
"name":"EngineV1",
"fields":[
{
"name":"size",
"type":"int",
"default":0
}
]
}
}
]
},
"default":{}
}
]
}
Currently I am able to attach a keepThisField property to every field that I want to keep (and to its parent tree), in this case details.engine.size and ownerId:
{
"type": "record",
"namespace": "company.car.v1",
"name": "CarV1",
"fields": [
{
"name": "plateNumber",
"type": "string"
},
{
"name": "ownerId",
"type": "string",
"keepThisField": "true"
},
{
"name" : "details",
"keepThisField": "true"
"type" : {
"type" : "record",
"name" : "DetailsV1",
"fields" : [
{
"name": "engine",
"type": {
"type": "record",
"name": "EngineV1",
"fields": [
{
"name": "size",
"type": "int",
"default": 0,
"keepThisField": "true"
},
{
"name": "valvesCount",
"type": "int",
"default": 0
}
]
}
},
{
"name" : "color",
"type" : "string",
"default" : "NONE"
},
{
"name" : "rimSize",
"type" : "int",
"default" : "NONE"
}
]},
"default" : {}
},
{
"name": "isBrandNew",
"type": "boolean"
}
]
}
What I need now is a mechanism to remove, in a deeply nested manner, all other fields that don't have the keepThisField property, and afterwards to strip the keepThisField property itself, so that we are left with the desired output.
Does anyone have an idea how to achieve this removal in a generic manner in JavaScript?
Update:
This is what I have tried with the flatMap:
function fn(o) {
const hasMore = _.get(o, 'type.fields');
if (o.keepThisField === true) {
if (hasMore) {
const retObj = {
...o,
type: {
...o.type,
fields: _.flatMap(o.type.fields, fn),
}
};
_.unset(retObj, 'keepThisField');
return retObj;
}
const cpO = o;
_.unset(cpO, 'keepThisField');
return ({
...cpO,
});
}
return [];
}
parentSchema.fields = _.flatMap(parentSchema.fields, fn)
[Answering my own question]
A potential solution for the removal part of the process. Credits to @Kinglish for the reference to this Stack Overflow question, which suggests using flatMap.
function fn(o) {
const hasMore = _.get(o, 'type.fields');
if (o.keepThisField === true) {
if (hasMore) {
const retObj = {
...o,
type: {
...o.type,
fields: _.flatMap(o.type.fields, fn),
}
};
_.unset(retObj, 'keepThisField');
return retObj;
}
const cpO = o;
_.unset(cpO, 'keepThisField');
return ({
...cpO,
});
}
return [];
}
parentSchema.fields = _.flatMap(parentSchema.fields, fn)
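One caveat with the snippet above: const cpO = o only copies the reference, so _.unset(cpO, 'keepThisField') also strips the flag from the original annotated schema. If that annotated schema needs to stay intact, a deep clone before running the removal is a simple safeguard. A small sketch, assuming lodash is already available:
// clone so the annotated source schema above is left untouched
const reducedSchema = _.cloneDeep(parentSchema);
reducedSchema.fields = _.flatMap(reducedSchema.fields, fn);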
In the post index, postid is the primary key and userid is a foreign key.
I want all posts, but only one post per userid, so that each user has exactly one post in the results, sorted by postdate (optionally latest first).
//Actual Result
[
{
userid: "u1",
postid: "p1"
},
{
userid: "u1",
postid: "p2"
},
{
userid: "u2",
postid: "p3"
},
{
userid: "u3",
postid: "p4"
},
{
userid: "u3",
postid: "p5"
},
{
userid: "u3",
postid: "p6"
}
]
I need it as below:
//Expecting Result
[
{
userid: "u1",
postid: "p1"
},
{
userid: "u2",
postid: "p3"
},
{
userid: "u3",
postid: "p4"
}
]
I think you can use top_hits for this. Here is a sample:
DELETE my-index-000001
PUT my-index-000001
{
"mappings": {
"properties": {
"userid": {
"type": "keyword"
},
"postid": {
"type": "keyword"
},
"postdate": {
"type": "date"
}
}
}
}
PUT my-index-000001/_doc/1
{"userid": "u1", "postid": "p1", "postdate": "2021-03-01"}
PUT my-index-000001/_doc/2
{"userid": "u1", "postid": "p2", "postdate": "2021-03-02"}
PUT my-index-000001/_doc/3
{"userid": "u2", "postid": "p3", "postdate": "2021-03-03"}
PUT my-index-000001/_doc/4
{"userid": "u3", "postid": "p4", "postdate": "2021-03-04"}
PUT my-index-000001/_doc/5
{"userid": "u3", "postid": "p5", "postdate": "2021-03-05"}
PUT my-index-000001/_doc/6
{"userid": "u3", "postid": "p6", "postdate": "2021-03-06"}
Those are the steps to create a sample index. And here is the query:
GET my-index-000001/_search
{
"size": 0,
"aggs": {
"top_users": {
"terms": {
"field": "userid",
"size": 100
},
"aggs": {
"top": {
"top_hits": {
"sort": [
{
"postdate": {
"order": "desc"
}
}
],
"_source": {
"includes": [ "postdate", "postid" ]
},
"size": 1
}
}
}
}
}
}
And inside the result set, you can see the top post for every user inside the aggregations:
{
"took" : 3,
"timed_out" : false,
"_shards" : {
"total" : 1,
"successful" : 1,
"skipped" : 0,
"failed" : 0
},
"hits" : {
"total" : {
"value" : 6,
"relation" : "eq"
},
"max_score" : null,
"hits" : [ ]
},
"aggregations" : {
"top_users" : {
"doc_count_error_upper_bound" : 0,
"sum_other_doc_count" : 0,
"buckets" : [
{
"key" : "u3",
"doc_count" : 3,
"top" : {
"hits" : {
"total" : {
"value" : 3,
"relation" : "eq"
},
"max_score" : null,
"hits" : [
{
"_index" : "my-index-000001",
"_type" : "_doc",
"_id" : "6",
"_score" : null,
"_source" : {
"postdate" : "2021-03-06",
"postid" : "p6"
},
"sort" : [
1614988800000
]
}
]
}
}
},
{
"key" : "u1",
"doc_count" : 2,
"top" : {
"hits" : {
"total" : {
"value" : 2,
"relation" : "eq"
},
"max_score" : null,
"hits" : [
{
"_index" : "my-index-000001",
"_type" : "_doc",
"_id" : "2",
"_score" : null,
"_source" : {
"postdate" : "2021-03-02",
"postid" : "p2"
},
"sort" : [
1614643200000
]
}
]
}
}
},
{
"key" : "u2",
"doc_count" : 1,
"top" : {
"hits" : {
"total" : {
"value" : 1,
"relation" : "eq"
},
"max_score" : null,
"hits" : [
{
"_index" : "my-index-000001",
"_type" : "_doc",
"_id" : "3",
"_score" : null,
"_source" : {
"postdate" : "2021-03-03",
"postid" : "p3"
},
"sort" : [
1614729600000
]
}
]
}
}
}
]
}
}
}
Assuming an index mapping of the form:
PUT user_posts
{
"mappings": {
"properties": {
"userid": {
"type": "keyword"
},
"postid": {
"type": "keyword"
},
"postdate": {
"type": "date"
}
}
}
}
You could:
aggregate on the userid and order the IDs alphabetically
sub-aggregate on the postid and sort the posts by postdate descending via a max aggregation.
filter the response through the filter_path option to only retrieve what you need
POST user_posts/_search?filter_path=aggregations.*.buckets.key,aggregations.*.buckets.*.buckets.key
{
"size": 0,
"aggs": {
"by_userid": {
"terms": {
"field": "userid",
"order": {
"_key": "asc"
},
"size": 100
},
"aggs": {
"by_latest_postid": {
"terms": {
"field": "postid",
"size": 1,
"order": {
"latest_posttime": "desc"
}
},
"aggs": {
"latest_posttime": {
"max": {
"field": "postdate"
}
}
}
}
}
}
}
}
Yielding:
{
"aggregations" : {
"by_userid" : {
"buckets" : [
{
"key" : "u1",
"by_latest_postid" : {
"buckets" : [
{
"key" : "p1"
}
]
}
},
{
"key" : "u2",
"by_latest_postid" : {
"buckets" : [
{
"key" : "p3"
}
]
}
},
{
"key" : "u3",
"by_latest_postid" : {
"buckets" : [
{
"key" : "p4"
}
]
}
}
]
}
}
}
which you can then post-process as you normally would:
...
const response = await ...; // transform the above request for use in the ES JS lib of your choice
const result = response.aggregations.by_userid.buckets.map(b => {
return {
userid: b.key,
postid: b.by_latest_postid.buckets && b.by_latest_postid.buckets[0].key
}
})
You can use the top_hits sub-aggregation: first do a terms aggregation on userid, then use top_hits with a sort on postdate to get the latest post for each user.
I should say that if you have many userids and you want the top hit for each one, you should probably use a composite aggregation as your top-level agg, and not terms (see the sketch below).
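A rough sketch of that composite variant, reusing the sample index and field names from the earlier answers (the aggregation names are just placeholders; page through large result sets by passing each response's after_key back as the after parameter):
GET my-index-000001/_search
{
  "size": 0,
  "aggs": {
    "by_user": {
      "composite": {
        "size": 100,
        "sources": [
          { "user": { "terms": { "field": "userid" } } }
        ]
      },
      "aggs": {
        "latest_post": {
          "top_hits": {
            "sort": [ { "postdate": { "order": "desc" } } ],
            "_source": { "includes": [ "postid", "postdate" ] },
            "size": 1
          }
        }
      }
    }
  }
}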
I have a list of CustomerType objects and a Customer object. The Customer object has the customerType id property on it. Based on the customer type id on the customer object, I have to look up or map to the right CustomerType object and display the name / code.
[ {
"id" : "5436d5fd-e3ea-4e09-be4a-a80967cd72e5",
"code" : "0",
"name" : "UN"
}, {
"id" : "674b76b8-f1ac-5c14-e053-ce5e1cac867d",
"code" : "1",
"name" : "NON-UN"
}, {
"id" : "674b76b8-f1ad-5c14-e053-ce5e1cac867d",
"code" : "2",
"name" : "COS-UN"
}, {
"id" : "674b76b8-f1ae-5c14-e053-ce5e1cac867d",
"code" : "NA",
"name" : NA"
} ]
Customer
{
"id" : "1",
"name": "Jhon",
"type": "5436d5fd-e3ea-4e09-be4a-a80967cd72e5",
}
This is what you could do.
const customerCodeArray = [{
"id": "5436d5fd-e3ea-4e09-be4a-a80967cd72e5",
"code": "0",
"name": "UN"
}, {
"id": "674b76b8-f1ac-5c14-e053-ce5e1cac867d",
"code": "1",
"name": "NON-UN"
}, {
"id": "674b76b8-f1ad-5c14-e053-ce5e1cac867d",
"code": "2",
"name": "COS-UN"
}, {
"id": "674b76b8-f1ae-5c14-e053-ce5e1cac867d",
"code": "NA",
"name": "NA"
}]
const customer = {
"id": "1",
"name": "Jhon",
"type": "5436d5fd-e3ea-4e09-be4a-a80967cd72e5",
};
const getCustomerCode = (type) => {
const filterList = customerCodeArray.filter((obj) => obj.id === type);
if (filterList.length > 0) {
return filterList[0];
}
}
console.log(getCustomerCode(customer.type));
Hope this is clear; if not, feel free to ask.
const loop = [ /* the whole CustomerType array from the question */ ];
const customer = { /* the customer object from the question */ };

console.log(loop.find(el => el.id === customer.type).name);
const filteredResult = customerCodeArray.filter(type => type.id === customer.type);
console.log(filteredResult[0].name);
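Both lookups above assume a match exists: find(...).name throws if nothing is found, and filteredResult[0].name throws if the filter result is empty. If a missing type id is possible, a guarded variant using optional chaining (ES2020) avoids that; the "Unknown" fallback here is just a placeholder:
const typeName = customerCodeArray.find(el => el.id === customer.type)?.name ?? "Unknown";
console.log(typeName);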
I have an array like this:
var jsonResponse = [
{
"name": "abc",
"value": [
{ "label" : "Daily", "value":"Daily"}
]
},
{
"name": "ccc",
"value": [
{ "label" : "Daily", "value":"Daily"}
]
}
]
And I want to convert it to:
{
"abc" : {
"name": "abc",
"value": [
{ "label" : "Daily", "value":"Daily"}
]
},
"ccc": {
"name": "ccc",
"value": [
{ "label" : "Daily", "value":"Daily"}
]
}
}
I probably don't want forEach.
We can do part of it with Object.assign(arrayDetails, ...jsonResponse);
But how do I get the result keyed by each object's name?
let indexedResult = {};
jsonResponse.map(obj => indexedResult[obj.name] = obj)
console.log(JSON.stringify(indexedResult));
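If Object.fromEntries (ES2019) is available, the same indexing can also be written as a single expression instead of using map purely for its side effect. A small sketch (the variable name is arbitrary):
const indexed = Object.fromEntries(jsonResponse.map(obj => [obj.name, obj]));
console.log(JSON.stringify(indexed));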
I have a collection personemail with an array of documents:
{ "Name" : "A", "Email" : [ { "EmailAddress" : "a#home.com", "EmailType" : "Home" }, { "EmailAddress" : "a#work.com", "EmailType" : "Work" } ] }
{ "Name" : "B", "Email" : [ { "EmailAddress" : "b#work.com", "EmailType" : "Work" } ] }
{ "Name" : "C" }
I have to display the data in the format below, i.e. if a person does not have a home / work email, it should be shown as "No Data", preferably keeping the same order (Home email first, Work email second):
{ "Name" : "A", "Email" : [ { "EmailAddress" : "a#home.com", "EmailType" : "Home" }, { "EmailAddress" : "a#work.com", "EmailType" : "Work" } ] }
{ "Name" : "B", "Email" : [ { "EmailAddress" : "No Data", "EmailType" : "Home" }, { "EmailAddress" : "b#work.com", "EmailType" : "Work" } ] }
{ "Name" : "C", "Email" : [ { "EmailAddress" : "No Data", "EmailType" : "Home" }, { "EmailAddress" : "No Data", "EmailType" : "Work" } ] }
I thought of implementing this with $unwind or the $switch operator. Suggestions, please.
It's possible:
db.collection.aggregate([
{ "$project": {
"Name": 1,
"Email": {
"$map": {
"input": ["Home", "Work"],
"as": "m",
"in": {
"$ifNull": [
{ "$arrayElemAt": [
{ "$filter": {
"input": "$Email",
"as": "e",
"cond": { "$eq": [ "$$e.EmailType", "$$m" ] }
}},
0
]},
{ "EmailAddress": "No Data", "EmailType": "$$m" }
]
}
}
}
}}
])
Which produces:
{
"_id" : ObjectId("591e7d66cb1a9433dab779cc"),
"Name" : "A",
"Email" : [
{
"EmailAddress" : "a#home.com",
"EmailType" : "Home"
},
{
"EmailAddress" : "a#work.com",
"EmailType" : "Work"
}
]
}
{
"_id" : ObjectId("591e7d66cb1a9433dab779cd"),
"Name" : "B",
"Email" : [
{
"EmailAddress" : "No Data",
"EmailType" : "Home"
},
{
"EmailAddress" : "b#work.com",
"EmailType" : "Work"
}
]
}
{
"_id" : ObjectId("591e7d66cb1a9433dab779ce"),
"Name" : "C",
"Email" : [
{
"EmailAddress" : "No Data",
"EmailType" : "Home"
},
{
"EmailAddress" : "No Data",
"EmailType" : "Work"
}
]
}
But personally I'd do that transformation in client code, since it's less obtuse. As a quick shell example:
db.collection.find().forEach(doc => {
  var existing = doc.Email || [];   // treat a missing Email field as an empty array
  doc.Email = ["Home", "Work"].map(m => {
    var val = existing.filter(e => e.EmailType === m)[0];
    return (val == null) ? { "EmailAddress": "No Data", "EmailType": m }
                         : val;
  });
  printjson(doc);
})
Basically, you use your "uniform" array of "Home" and "Work" as the input to $map, then look up the matching element in the document's current Email array. Anything that returned null on the lookup is swapped with the default data.