Given that I have the following JSON object,
dbConfig = {
"db": "default",
"default": {
"defaultDB": "sqlite",
"init": "init",
"migrations": {
"directory": "migrations",
"tableName": "migrations"
},
"pool": {
"min": "2",
"max": "10"
},
"sqlite": {
"client": "sqlite3",
"connection": {
"filename": "data/default/sqlitedb/test.db"
}
},
"oracle": {
"client": "oracledb",
"config": {
"development": {
"user": "test",
"pass": "test",
"db": "test"
},
"production": {
"user": "test",
"pass": "test",
"db": "test"
},
"test": {
"user": "test",
"pass": "test",
"db": "test"
}
}
}
}
};
Using Node & Lodash, is there any possibility of getting either connection or config, depending on what dbConfig.default.defaultDB is set to?
So for instance if I set dbConfig.default.defaultDB=oracle and process.env.NODE_ENV=development I want to be able to get dbConfig.default[dbConfig.default.defaultDB].config.development
Or if I set dbConfig.default.defaultDB=sqlite just to get dbConfig.default[dbConfig.default.defaultDB].connection
In other words, if the database has environment specific configuration then this will be in "config": {} and if not in "connection": {}
It doesn't have to be Lodash. It can also be plain javascript.
Solution without lodash
// Name (key) of the configured default database, e.g. "sqlite" or "oracle".
// NOTE: the original assigned the whole config object here but then read an
// undefined `defaultDb` variable, and compared against the misspelled
// 'sqllite', so the sqlite branch could never be taken.
var defaultDb = dbConfig.default.defaultDB;
var db;
if (defaultDb === 'sqlite') {
  // Databases without environment-specific settings keep them in "connection".
  db = dbConfig.default[defaultDb].connection;
} else {
  // Environment-specific databases keep per-env settings under "config".
  var env = process.env.NODE_ENV;
  db = dbConfig.default[defaultDb].config[env];
}
Solution with lodash
Here I'm using lodash get function to get object field value or null if it doesn't exist. Also I'm using template string syntax: ${val} to format field path.
// Name (key) of the configured default database; the original stored the
// config object in `defaultDbName` and then read an undefined `defaultDb`.
var defaultDbName = dbConfig.default.defaultDB;
var defaultDbConf = dbConfig.default[defaultDbName];
var env = process.env.NODE_ENV;
// Prefer the flat "connection" object; otherwise fall back to the
// per-environment entry under "config" (_.get returns undefined safely
// when the path does not exist).
var db = defaultDbConf.connection || _.get(defaultDbConf, `config.${env}`);
Btw, your configuration json is too complex, much better to have configuration per environment.
Solution without [dependencies] (originally answered here, but not AngularJS-specific)
Your JSON is complex, yes, but it could also be smaller and more readable without all the duplication, where each environment has the same set of attributes, which may or may not vary, and would be needlessly duplicated.
With a simple algorithm (jsFiddle) you can dynamically parse your JSON configuration for specific property-name suffixes (property#suffix) and have a catalogue of environment-varying properties alongside non-varying properties, without artificially structuring your configuration and without repetition, including deeply-nested configuration objects.
You can also mix-and-match suffixes and combine any number of environmental or other arbitrary factors to groom your configuration object.
Example, snippet of pre-processed JSON config:
var config = {
'help': {
'BLURB': 'This pre-production environment is not supported. Contact Development Team with questions.',
'PHONE': '808-867-5309',
'EMAIL': 'coder.jen@lostnumber.com'
},
'help#www.productionwebsite.com': {
'BLURB': 'Please contact Customer Service Center',
'BLURB#fr': 'S\'il vous plaît communiquer avec notre Centre de service à la clientèle',
'BLURB#de': 'Bitte kontaktieren Sie unseren Kundendienst!!1!',
'PHONE': '1-800-CUS-TOMR',
'EMAIL': 'customer.service@productionwebsite.com'
},
}
... and post-processed (given location.hostname='www.productionwebsite.com' and navigator.language of 'de'):
prefer(config,['www.productionwebsite.com','de']); // prefer(obj,string|Array<string>)
JSON.stringify(config); // {
'help': {
'BLURB': 'Bitte kontaktieren Sie unseren Kundendienst!!1!',
'PHONE': '1-800-CUS-TOMR',
'EMAIL': 'customer.service#productionwebsite.com'
}
}
Obviously you can pull those values at render-time with location.hostname and window.navigator.language. The algorithm to process the JSON itself isn't terribly complex (but you may still feel more comfortable with an entire framework for some reason, instead of a single function):
// prefer(obj, suf): prune suffixed properties ("name#suffix") IN PLACE.
// Each '#'-suffixed property gets a score built from the position of its
// '&'-separated suffixes in the preference list `suf` (later-indexed
// preferences score higher via powers of 10; a suffix absent from `suf`
// zeroes the whole candidate). The best-scoring candidate is promoted onto
// the base property name, all suffixed properties and the temporary
// '##'-prefixed score bookkeeping are deleted, and surviving nested objects
// are processed recursively.
// `suf` may be an array, a string, a number, or a comma-separated string.
function prefer(obj,suf) {
// pr: process one object level; recurses into surviving child objects.
function pr(o,s) {
for (var p in o) {
if (!o.hasOwnProperty(p) || !p.split('#')[1] || p.split('##')[1] ) continue; // ignore: proto-prop OR not-suffixed OR temp prop score
var b = p.split('#')[0]; // base prop name
if(!!!o['##'+b]) o['##'+b] = 0; // +score placeholder
var ps = p.split('#')[1].split('&'); // array of property suffixes
var sc = 0; var v = 0; // reset (running)score and value
while(ps.length) {
// suffix value: index(of found suffix in prefs)^10
v = Math.floor(Math.pow(10,s.indexOf(ps.pop())));
if(!v) { sc = 0; break; } // found suf NOT in prefs, zero score (delete later)
sc += v;
}
if(sc > o['##'+b]) { o['##'+b] = sc; o[b] = o[p]; } // hi-score! promote to base prop
delete o[p];
}
for (var p in o) if(p.split('##')[1]) delete o[p]; // remove scores
for (var p in o) if(typeof o[p] === 'object') pr(o[p],s); // recurse surviving objs
}
if( typeof obj !== 'object' ) return; // validate
suf = ( (suf || suf === 0 ) && ( suf.length || suf === parseFloat(suf) ) ? suf.toString().split(',') : []); // array|string|number|comma-separated-string -> array-of-strings
pr(obj,suf.reverse());
}
The property name suffix can have any number of suffixes after the '#', delimited by '&' (ampersand) and, where there are two properties with different but preferred suffixes, will be preferred in the order in which they are passed to the function. Suffixes that contain BOTH preferred strings will be preferred above all others. Suffixes found in the JSON that are not specified as preferred will be discarded.
Preference/discrimination will be applied top-down on your object tree, and if higher-level objects survive, they will be subsequently inspected for preferred suffixes.
With this approach, your JSON (I'm making some assumptions about which attributes vary between your environments and which do not) might be simplified as follows:
// Suffix-annotated configuration for prefer(); '#tst' / '#prd' mark
// environment-specific overrides of the development defaults.
// (Fixes the original snippet: the "db" value was missing its opening
// brace, and the "filename" lines were missing trailing commas.)
dbConfig = {
  "pool": {
    "min": "2",
    "max": "10"
  },
  "init": "init",
  "migrations": {
    "directory": "migrations",
    "tableName": "migrations"
  },
  "db": {
    "client": "sqlite",
    "filename": "data/default/sqlitedb/development.db",
    "filename#tst": "data/default/sqlitedb/test.db",
    "filename#prd": "data/default/sqlitedb/production.db"
  },
  "db#oracle": {
    "client": "oracle",
    "user": "devuser",
    "user#tst": "testdbuser",
    "user#prd": "testdbuser",
    "pass": "devpass",
    "pass#tst": "testdbpass",
    "pass#prd": "testdbpass",
    "db": "devdb",
    "db#tst": "testdbschema",
    "db#prd": "testdbschema"
  }
};
So that you could feed this into the prefer() function with these args+results:
for sqlite, test env:
prefer(dbConfig,'tst');
JSON.stringify(dbConfig); // dbConfig: {
"pool": {
"min": "2",
"max": "10"
},
"init": "init",
"migrations": {
"directory": "migrations",
"tableName": "migrations"
},
"db": {
"client": "sqlite",
"filename": "data/default/sqlitedb/test.db"
}
};
for oracle, default/development environment:
prefer(dbConfig,'oracle'); // oracle, dev(default) env
JSON.stringify(dbConfig); // dbConfig: {
"pool": {
"min": "2",
"max": "10"
},
"init": "init",
"migrations": {
"directory": "migrations",
"tableName": "migrations"
},
"db": {
"client": "oracle",
"user": "devuser",
"pass": "devpass",
"db": "devdb"
}
};
prefer(dbConfig,'oracle,prd'); // oracle, production env
JSON.stringify(dbConfig); // dbConfig: {
"pool": {
"min": "2",
"max": "10"
},
"init": "init",
"migrations": {
"directory": "migrations",
"tableName": "migrations"
},
"db": {
"client": "oracle",
"user": "testdbuser",
"pass": "testdbpass",
"db": "testdbschema"
}
};
Abstract usage and examples:
var o = { 'a':'apple', 'a#dev':'apple-dev', 'a#fr':'pomme',
'b':'banana', 'b#fr':'banane', 'b#dev&fr':'banane-dev',
'c':{ 'o':'c-dot-oh', 'o#fr':'c-point-oh' }, 'c#dev': { 'o':'c-dot-oh-dev', 'o#fr':'c-point-oh-dev' } };
/*1*/ prefer(o,'dev'); // { a:'apple-dev', b:'banana', c:{o:'c-dot-oh-dev'} }
/*2*/ prefer(o,'fr'); // { a:'pomme', b:'banane', c:{o:'c-point-oh'} }
/*3*/ prefer(o,'dev,fr'); // { a:'apple-dev', b:'banane-dev', c:{o:'c-point-oh-dev'} }
/*4*/ prefer(o,['fr','dev']); // { a:'pomme', b:'banane-dev', c:{o:'c-point-oh-dev'} }
/*5*/ prefer(o); // { a:'apple', b:'banana', c:{o:'c-dot-oh'} }
Caveats
Usage of the # in property name is NOT standard and is invalid in dot-notation, but so far has not broken any browsers we've tested this in. The UPSIDE of this is that it prevents developers from expecting they can refer to your pre-processed, suffixed attributes. A developer would have to be aware of, and a bit unconventional and refer to your attribute as a string (obj['key#suf']) to do that, which, by the way, is the reason this function is possible.
If future JavaScript engines reject it, substitute for any other tolerable convention, just be consistent.
This algorithm has not been profiled for performance, or rigorously tested for other potential problems.
In its current form, used one-time on startup/load, we have yet to run into problems with.
As always, YMMV.
Related
Goal
I want to develop a middleware in TypeScript that filters the response of a REST API and returns only defined properties.
It should work generically, i.e. independent of specific entities. Neither their properties nor the exact depth (e.g. with any number of relations) should be necessarily known.
Example
An author has any number of articles with any number of comments.
[
{
"name": "John Doe",
"email": "john#doe.com",
"articles": [
{
"title": "Lalilu 1",
"text": "la li lu",
"comments": [
{
"author": "Bendthatdict Cumberstone",
"text": "Great article!"
},
{
"author": "Bendthatdict Cumberstone",
"text": "Great article!"
}
]
},
{
"title": "Lalilu 1",
"text": "la li lu",
"comments": [
{
"author": "Bendthatdict Cumberstone",
"text": "Great article!"
},
{
"author": "Bendthatdict Cumberstone",
"text": "Great article!"
}
]
}
]
},
{
"name": "Jane Doe",
"email": "jane@doe.com",
"articles": [
{
"title": "Lalilu 1",
"text": "la li lu",
"comments": [
{
"author": "Bendthatdict Cumberstone",
"text": "Great article!"
},
{
"author": "Bendthatdict Cumberstone",
"text": "Great article!"
}
]
},
{
"title": "Lalilu 1",
"text": "la li lu",
"comments": [
{
"author": "Bendthatdict Cumberstone",
"text": "Great article!"
},
{
"author": "Bendthatdict Cumberstone",
"text": "Great article!"
}
]
}
]
}
]
Now I want to specify that it should return everything except the "text" of each article and the "author" of each comment.
Syntax could look like this with glob notation:
select("*,!articles.text,!articles.comments.author")
Approach
For objects and nested objects it is quite simple, e.g. with pick() and omit() of "lodash", but I fail when arrays step into the game.
I did some research and came across packages such as json-mask, node-glob or glob-object but none of them exactly met my needs and I was not able to combine them for success.
Question
What is the most efficient way to generically filter an arbitrarily nested JSON with any number of further objects / arrays?
Also, how could the TypeScripts type system be used to advantage?
I would be very grateful for general coding approaches or even tips for a package that can already do this!
In short I would break this up into functions. You could create helpers that do more or less what you want with a string/filter as you show however I'd work it in reverse. Get a nice way to iterate so any post processing can be done, then build your helpers as you wish against that. Here's what I mean:
Example
// A single comment attached to an article.
export interface IComment {
author: string;
text: string;
}
// An article with its attached comments.
export interface IArticle {
title: string;
text: string;
comments: IComment[];
}
// Top-level author record ("composer") owning any number of articles.
export interface IComposer {
name: string,
email: string,
articles: IArticle[];
}
// Remove items from list for brevity sake...
// Sample fixture shaped as IComposer[] for the selectFrom() demo below.
// NOTE(review): the '#' in the e-mail string looks like an '@' mangled by
// the paste — confirm before relying on the value.
const authorList = [
{
"name": "John Doe",
"email": "john#doe.com",
"articles": [
{
"title": "Lalilu 1",
"text": "la li lu",
"comments": [
{
"author": "Bendthatdict Cumberstone",
"text": "Great article!"
}
]
}
]
}
] as IComposer[];
/**
* Accepts JSON string or array of type.
*
* Builds a tiny fluent API: register() queues filter/transform callbacks,
* run() applies them to every element and collects the surviving results.
*
* @param arr a JSON string containing an array of type, or an array of type.
* @returns the api object ({ filters, register, run }).
*/
export function selectFrom<T extends Record<string, any>>(arr: string | T[]) {
// If you want to use this route I would suggest
// also a function to validate that the JSON is
// shaped correctly.
if (typeof arr === 'string')
arr = JSON.parse(arr);
const collection = arr as T[];
const api = {
filters: [],
register,
run
};
/**
* Register a search op.
* @param fn filter/transform applied to each element; returning a falsy
* value drops the element from the results (see run() below).
* @returns the api object, so calls can be chained.
*/
function register(fn: (obj: T) => Partial<T>) {
if (typeof fn === 'function')
api.filters.push(fn);
return api;
}
/**
* Run registered ops and filter results.
* @returns the surviving (possibly reshaped) elements as Partial<T>[].
*/
function run() {
return collection.reduce((results, obj) => {
let result = obj;
// Don't use reducer here as you can't break
// and would unnecessarily loop through filters
// that have no need to run, use for of instead.
for (const filter of api.filters) {
// if we set the result to null
// don't continue to run filters.
if (!result) break;
// Pipe in the previous result, we start with
// original object but it's shape could change
// so we keep iterating with the previous result.
const filtered = filter(result);
// update the result.
if (filtered)
result = filtered;
}
if (result)
results.push(result);
return results;
// If changing the object you're going to
// end up with partials of the original
// shape or interface.
}, [] as Partial<T>[]);
}
return api;
}
Usage
By making this function based at the core you have a lot more flexibility. From there you could make a simple helper that maps your Glob or SQL like string to the pre-defined filter functions. Let me know if you have further questions.
// Example: register one filter that strips article.text and comment.author
// (via rest-destructuring) while leaving every other property intact.
const filtered =
selectFrom(authorList)
.register((composer) => {
composer.articles = composer.articles.map(article => {
const { text, ...filteredArticle } = article;
filteredArticle.comments = filteredArticle.comments.map(comment => {
const { author, ...filteredComment } = comment;
return filteredComment as typeof comment;
});
// Note setting to type of IArticle here so typescript
// doesn't complain, this is because you are removing props
// above so the shape changes so you may want to consider
// setting the props you plan to strip as optional or make
// everything a partial etc. I'll leave that to you to decide.
return filteredArticle as typeof article;
});
return composer;
})
.run();
What's Next
From here to get where you want it's about string parsing. Keep in mind Lodash does support gets down into nested values in an array. You can see this here in the docs.
Given that you could leverage Lodash using both _.get _.omit... etc along with a little parsing using dot notation.
Done this very thing with permissions. As such I feel strongly you need to start with a simple api to process then from there make your map from either Glob like or SQL string to those helpers.
I have a response from a web service and want to replace some values in the response with my custom values.
One way is to write a tree traverser and then check for the value and replace with my custom value
so the response is some what like this:
[
{
"name": "n1",
"value": "v1",
"children": [
{
"name": "n2",
"value": "v2"
}
]
},
{
"name": "n3",
"value": "v3"
}
]
now my custom map is like this
const map = {
"v1": "v11",
"v2": "v22",
"v3": "v33"
};
All I want is
[
{
"name": "n1",
"value": "v11",
"children": [
{
"name": "n2",
"value": "v22"
}
]
},
{
"name": "n3",
"value": "v33"
}
]
I was thinking if I could stringify my response and then replace values using a custom build regex from my map of values.
Will it be faster as compared to tree traverser?
If yes, how should I do that?
somewhat like this
originalString.replace(regexp, function (replacement))
The tree traversal is faster
Note that some things could be done more efficiently in the regex implementation but I still think there are some more bottlenecks to explain.
Why the regex is slow:
There are probably many more reasons why the regex is slower but I'll explain at least one significant reason:
When you're using regex to find and replace, you're using creating new strings every time and performing your matches every time. Regex expressions can be very expensive and my implementation isn't particularly cheap.
Why is the tree traversal faster:
In the tree traversal, I'm mutating the object directly. This doesn't require creating new string objects or any new objects at all. We're also not performing a full search on the whole string every time as well.
RESULTS
Run the performance test below. The test uses console.time to record how long each implementation takes. You'll see that the tree traversal is much faster.
// Replace every "value" property in `obj` (serialized to JSON text) using
// the oldValue -> newValue pairs in `map`, then parse the result back.
// Fixes vs. the original: the RegExp now carries the 'g' flag (the original
// replaced only the FIRST occurrence of each value), and map keys are
// escaped so regex metacharacters are matched literally.
function usingRegex(obj, map) {
  // Escape regex metacharacters so map keys are matched literally.
  const escapeRe = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  return JSON.parse(Object.keys(map).reduce((json, oldValue) => {
    return json.replace(
      new RegExp(`"value":"${escapeRe(oldValue)}"`, 'g'),
      () => `"value":"${map[oldValue]}"`
    );
  }, JSON.stringify(obj)));
}
// Walk the tree and rewrite item.value via `map`, MUTATING `obj` in place.
// Fix vs. the original: only rewrite when `map` actually contains the value;
// the original assigned map[item.value] unconditionally, clobbering any
// unmapped value with `undefined`.
function usingTree(obj, map) {
  function traverse(children) {
    for (let item of children) {
      if (item && item.value != null &&
          Object.prototype.hasOwnProperty.call(map, item.value)) {
        // getting a value from a JS object is O(1)!
        item.value = map[item.value];
      }
      if (item && item.children) {
        traverse(item.children);
      }
    }
  }
  traverse(obj);
  return obj; // mutates
}
// Shared fixture for both implementations.
// NOTE(review): usingTree mutates this object in place, so the timed loops
// below operate on already-rewritten values — presumably intentional for a
// rough benchmark, but worth confirming.
const obj = JSON.parse(`[
{
"name": "n1",
"value": "v1",
"children": [
{
"name": "n2",
"value": "v2"
}
]
},
{
"name": "n3",
"value": "v3"
}
]`);
const map = {
  "v1": "v11",
  "v2": "v22",
  "v3": "v33"
};
// Sanity-check both implementations before timing them.
console.log('== TEST THE FUNCTIONS ==');
console.log('usingRegex', usingRegex(obj, map));
console.log('usingTree', usingTree(obj, map));
const iterations = 10000; // ten thousand
console.log('== DO 10000 ITERATIONS ==');
// Time the regex-based implementation.
console.time('regex implementation');
let i = 0;
while (i < iterations) {
  usingRegex(obj, map);
  i += 1;
}
console.timeEnd('regex implementation');
// Time the tree-traversal implementation.
console.time('tree implementation');
i = 0;
while (i < iterations) {
  usingTree(obj, map);
  i += 1;
}
console.timeEnd('tree implementation');
Will it be faster as compared to tree traverser?
I don't know. I think it would depend on the size of the input, and the size of the replacement map. You could run some tests at JSPerf.com.
If yes, how should I do that?
It's fairly easy to do with a regex-based string replacement if the values you are replacing don't need any special escaping or whatever. Something like this:
// Demo input: nested records whose "value" fields should be rewritten.
const input = [
  { "name": "n1", "value": "v1", "children": [{ "name": "n2", "value": "v2" }] },
  { "name": "n3", "value": "v3" }
];
// old-value -> new-value mapping
const map = { "v1": "v11", "v2": "v22", "v3": "v33" };
// Alternation over the map keys, preceded by ':' and optional whitespace so
// only property *values* (never property names) can match.
const regex = new RegExp(':\\s*"(' + Object.keys(map).join('|') + ')"', 'g');
// NOTE: if you've received this data as JSON text, do the replacement
// *before* parsing it — don't parse it, restringify it, then reparse it.
const json = JSON.stringify(input);
const result = JSON.parse(
  json.replace(regex, (m, key) => ': "' + map[key] + '"')
);
console.log(result);
The traverser is definitely faster: a string replace has to travel over every character of the final string, whereas the traverser can skip items that don't need to be visited.
The server I'm working with changed the REST format from plain JSON:
{
"removedVertices": [
{
"id": "1",
"info": {
"host": "myhost",
"port": "1111"
},
"name": "Roy",
"type": "Worker"
}
],
"id": "2",
"time": 1481183401573
}
To Jackson format:
{
"removedVertices": [
"java.util.ArrayList",
[
{
"id": "1",
"info": [
"java.util.HashMap",
{
"host": "myhost",
"port": "1111"
}
]
"name": "Roy",
"type": "Worker",
}
]
"id": "2",
"time": 1482392323858
}
How can I parse it the way it was before in Angular/Javascript?
Assuming only arrays are affected, I would use underscore.js and write a recursive function to remove the Jackson type.
// Recursively strip Jackson default-typing wrappers from `input`.
// Jackson serializes typed collections/maps as a two-element array:
// ["java.util.ArrayList", <actual value>]. The original implementation
// returned val[1] without recursing into it, so wrappers nested inside
// (e.g. each vertex's "info" HashMap) were left in place.
// CAVEAT: a genuine 2-element array whose first element is a string would
// be misread as a wrapper — inherent ambiguity of this format.
function jackson2json(input) {
  if (_.isArray(input)) {
    if (input.length === 2 && _.isString(input[0])) {
      // drop the type tag and process the wrapped value
      return jackson2json(input[1]);
    }
    // plain array: transform each element, preserving array shape
    return _.map(input, jackson2json);
  }
  if (_.isObject(input)) {
    // apply the transformation to every property value
    return _.mapObject(input, function(val) {
      return jackson2json(val);
    });
  }
  // primitive types pass through unchanged
  return input;
}
If the API is meant to be RESTful, the server should not return non-plain-JSON results. I think the server side needs to fix that.
I think it is because the server enabled the Polymorphic Type Handling feature.
Read Jackson Default Typing for object containing a field of Map and JacksonPolymorphicDeserialization.
Disable the feature and you will get result identical to plain json.
The main difference i see is that in arrays you have an additional string element at index 0.
If you always get the same structure you can do like this:
// Strip the Jackson type tags from the known structure, mutating `jackson`
// in place and returning it.
// Fix vs. the original: splice(0, 1) on removedVertices left the vertex
// list nested one level too deep (["type", [list]] -> [[list]]), so the
// forEach visited the list itself and rmVert.info was undefined. We keep
// the payload element instead; "info" is unwrapped the same way so it ends
// up a plain object rather than a one-element array.
function jacksonToJson(jackson) {
  // removedVertices arrives as ["java.util.ArrayList", [ ...vertices ]]
  jackson.removedVertices = jackson.removedVertices[1];
  jackson.removedVertices.forEach((rmVert) => {
    // info arrives as ["java.util.HashMap", { ... }]
    rmVert.info = rmVert.info[1];
  });
  return jackson;
}
Can anybody help me get data from the JSON below? I have received JSON data in the format below, and as you can see there is a "0" wrapper key in each record. My question is how I can get the data out of this format, or whether there is any way to remove the "0" wrappers from the JSON.
[{
"ChkValue": "ChkValue",
"Description": "Description",
"Mode": "Mode"
}, {
"0": {
"ChkValue": "false",
"Description": "Made sure guards are in place on machine",
"Mode": "Eliminate"
}
}, {
"0": {
"ChkValue": "false",
"Description": "Use Liveguard at electrical source2",
"Mode": "Isolate"
}
}, {
"0": {
"ChkValue": "false",
"Description": "Wear ear-muffs when using machine",
"Mode": "Isolate"
}
}]
This is a basic javascript object traversal problem.
To access the data inside the second object (that says "Made sure guards are in place..."), you would do:
jsonObj[1]["0"].Description
You can use the JSON.parse() function to work with it in JS.
user JSON.parse() to iterate over JSON
FIDDLE
// Parse the JSON text, then log every record that is wrapped under a "0" key
// (the first array element is a header row without the wrapper).
var a = '[{"ChkValue":"ChkValue","Description":"Description","Mode":"Mode"},{"0":{"ChkValue":"false","Description":"Made sure guards are in place on machine","Mode":"Eliminate"}},{"0":{"ChkValue":"false","Description":"Use Liveguard at electrical source2","Mode":"Isolate"}},{"0":{"ChkValue":"false","Description":"Wear ear-muffs when using machine","Mode":"Isolate"}}]';
var b = JSON.parse(a);
for (var entry of b) {
  var record = entry["0"];
  if (typeof record === "undefined") continue; // skip the header row
  console.log(record.ChkValue);
  console.log(record.Description);
  console.log(record.Mode);
}
Use list[index][0]
// The first element is a header-style row; each remaining element wraps its
// record under the key "0", so access goes through list[index][0].
var list = [
  {
    ChkValue: "ChkValue",
    Description: "Description",
    Mode: "Mode"
  },
  {
    "0": {
      ChkValue: "false",
      Description: "Made sure guards are in place on machine",
      Mode: "Eliminate"
    }
  },
  {
    "0": {
      ChkValue: "false",
      Description: "Use Liveguard at electrical source2",
      Mode: "Isolate"
    }
  },
  {
    "0": {
      ChkValue: "false",
      Description: "Wear ear-muffs when using machine",
      Mode: "Isolate"
    }
  }
];
console.log(list[1][0].ChkValue); // logs the string "false"
How can I trim everything from my JSON except for a few properties I specify at different levels, while keeping my node structure and array structure?
I've looked into Underscore.js and it seems like it doesn't have as much fine-grained control for preserving the node structure. In the example below, ideally, I would like to be able to specify '_id', 'revisions[0]._id', 'revisions[0]._clientHasViewed' as arguments to keep those properties.
Surely there's an easy way to do this. Here's what I'm looking for:
ORIGINAL
{
"_id": "50cbf5214ffaee8f0400000a",
"_user": "50b1a966c12ef0c426000007",
"expenses": [],
"name": "Untitled Project",
"payments": [],
"revisions": [
{
"_id": "50cbfae65c9d160506000007",
"clientHasViewed": false,
"comments": [],
"dateCreated": "2012-12-15T04:21:58.605Z"
},
{
"_id": "50cbfae65c9d160506000008",
"clientHasViewed": false,
"comments": [],
"dateCreated": "2012-12-15T04:21:58.605Z"
}
],
"status": "Revised",
"thumbURL": "/50cd3107845d90ab28000007/thumb.jpg"
}
TRIMMED
{
"_id": "50cbf5214ffaee8f0400000a",
"revisions": [
{
"_id": "50cbfae65c9d160506000007",
"clientHasViewed": false,
},
],
}
ExtJs has a copyTo function (only one level), but you could create something similar with AngularJs (angular has angular.copy, but that copies the whole object):
// Copy only the named own properties from `source` onto `dest`.
// `names` is a list separated by commas, semicolons or whitespace.
// Returns `dest` so calls can be chained.
// Improvement: String.prototype.split always yields a plain Array, so the
// native Array.forEach works here — no angular.forEach (framework
// dependency) needed; empty tokens from trailing separators are skipped.
var copyTo = function(dest, source, names){
  names.split(/[,;\s]/).forEach(function(name){
    // skip empty tokens and inherited/absent properties
    if(name && source.hasOwnProperty(name)){
      dest[name] = source[name];
    }
  });
  return dest;
};
E.g.
// Build the trimmed object: top-level _id plus a one-element revisions
// array keeping _id and clientHasViewed.
// Fixes vs. the original: `trimmed` was reassigned to the inner revision
// object (losing the outer object), and the data's property is
// "clientHasViewed", not "_clientHasViewed".
var trimmed = copyTo({}, original, '_id');
trimmed.revisions = [copyTo({}, original.revisions[0], '_id,clientHasViewed')];