Convert a list object into another structure - javascript

I have a list, converted into a JS array. Several rows have tab prefixes:
var data = [
  "2",
  "\t2.1",
  "\t\t2.1.1",
  "\t2.2",
  "3",
  "4"
]
What I'm trying to do is to get the following structure:
var data = [
  "2",
  "2->2.1",
  "2->2.1->2.1.1",
  "2->2.2",
  "3",
  "4"
]
What I tried (produces the wrong result):
for (var i = 0; i < data.length; i++) {
  var current = data;
  var length = data[i].length - data[i].replaceAll(" ", "").length;
  if (!length) {
    console.log(current);
  } else {
    console.log(data[i-1] + '->' + data[i].trim());
  }
}
Update (@MustSeeMelons): your solution produces wrong results on the test data attached below:

flat to tree
I solved this problem in this Q&A. We can reuse the same functions on your data -
const data = `
2
  2.1
    2.1.1
  2.2
3
4
`
// using makeChildren and sanitize from the linked Q&A
console.log(makeChildren(sanitize(data)))
[
  {
    "value": "2",
    "children": [
      {
        "value": "2.1",
        "children": [
          {
            "value": "2.1.1",
            "children": []
          }
        ]
      },
      {
        "value": "2.2",
        "children": []
      }
    ]
  },
  {
    "value": "3",
    "children": []
  },
  {
    "value": "4",
    "children": []
  }
]
tree to flat
All that remains now is to convert the tree to a flat list of paths -
function* paths(t) {
  switch (t?.constructor) {
    case Array:
      for (const child of t)
        yield* paths(child)
      break
    case Object:
      yield [t.value]
      for (const path of paths(t.children))
        yield [t.value, ...path]
      break
  }
}
const result =
Array.from(paths(makeChildren(sanitize(data))), path => path.join("->"))
[
"2",
"2->2.1",
"2->2.1->2.1.1",
"2->2.2",
"3",
"4"
]
advantages
Decomposing the problem into smaller parts makes it easier to solve and yields reusable functions, but those are not the only advantages. The intermediate tree representation gives you the ability to make other modifications in the context of the tree that the flat representation does not permit. Additionally, the paths function yields arrays of path segments, allowing the caller to decide which final effect is desired, i.e. path.join("->") or otherwise.
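For example, a minimal sketch reusing the same functions on the same data, where the caller picks a different separator or keeps only the final segment of each path -
const tree = makeChildren(sanitize(data))

// same paths, joined with a different separator
console.log(Array.from(paths(tree), path => path.join(" / ")))

// or keep only the last segment of each path
console.log(Array.from(paths(tree), path => path[path.length - 1]))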
demo
Run the demo below to verify the result in your own browser -
const sanitize = (str = "") =>
  str.trim().replace(/\n\s*\n/g, "\n")

const makeChildren = (str = "") =>
  str === ""
    ? []
    : str.split(/\n(?!\s)/).map(make1)

const make1 = (str = "") => {
  const [ value, children ] = cut(str, "\n")
  return { value, children: makeChildren(outdent(children)) }
}

const cut = (str = "", char = "") => {
  const pos = str.search(char)
  return pos === -1
    ? [ str, "" ]
    : [ str.substr(0, pos), str.substr(pos + 1) ]
}

const outdent = (str = "") => {
  const spaces = Math.max(0, str.search(/\S/))
  const re = new RegExp(`(^|\n)\\s{${spaces}}`, "g")
  return str.replace(re, "$1")
}

function* paths(t) {
  switch (t?.constructor) {
    case Array: for (const child of t) yield* paths(child); break
    case Object: yield [t.value]; for (const path of paths(t.children)) yield [t.value, ...path]; break
  }
}

const data = `\n2\n\t2.1\n\t\n\t\t2.1.1\n\t2.2\n3\n4`

console.log(
  Array.from(paths(makeChildren(sanitize(data))), path => path.join("->"))
)
.as-console-wrapper { min-height: 100%; top: 0; }
remarks
outdent is generic and works whether you use literal tabs, \t \t\t \t\t\t ..., or some number of spaces. What matters is that the whitespace is consistent. View the original Q&A for more insight on how each part works.
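For instance, a small sketch of outdent on tab-indented and space-indented input (illustrative strings, not from the original post) -
console.log(outdent("\t2.1\n\t\t2.1.1"))  // "2.1\n\t2.1.1"
console.log(outdent("  2.1\n    2.1.1")) // "2.1\n  2.1.1"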

There are many approaches to this.
Approach #1:
In case you're allowed to use auxiliary space, create an array that keeps track of the latest entry seen at each level; the grouping is based on the tab depth, not on the values themselves.
let data = ["2", "\t2.1", "\t\t2.1.1", "\t2.2", "3", "4"], tab = "\t", latest = [];

let output = data.map(x => {
  let noTabs = x.split(tab).length - 1;
  latest = [...latest.slice(0, noTabs), x.replaceAll(tab, '')];
  return latest.join('->');
})

console.log(output)

This will almost do what you wish; I'm not sure how you want to group them:
const mapped = data.reduce((acc, curr, idx) => {
  if (idx !== 0) {
    // Get leading digit of current & previous element
    const current = curr.trim()[0];
    const previous = acc[idx - 1].trim()[0];

    // If leading match, aggregate them
    if (current === previous) {
      acc.push(`${acc[idx - 1].trim()}->${curr.trim()}`);
    } else {
      acc.push(curr.trim());
    }
  } else {
    acc.push(curr.trim());
  }

  return acc;
}, []);
Don't use for loops unless you need to break out of the loop at some point. Transforming arrays should usually be done with the map function.
I used reduce here because this problem required access to the new, already-mapped elements.
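As a purely illustrative sketch (not the fix for the data above): map transforms each element independently, while reduce lets each step see what has already been produced -
const doubled = [1, 2, 3].map(n => n * 2)  // [2, 4, 6]

const runningSums = [1, 2, 3].reduce(
  (acc, n) => [...acc, (acc[acc.length - 1] || 0) + n],
  []
)  // [1, 3, 6]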

Related

Make site map generated from flat array of pages recursive

I have a flat array of pages and I'm trying to create a hierarchical site map from them. What I have so far is very messy, so I would like a better way of doing it.
This is the array of pages I have:
const pages = [
  {
    "name": "Page 1",
    "url": "/page-1/",
  },
  {
    "name": "Page 2",
    "url": "/page-2/",
  },
  {
    "name": "Child 1",
    "url": "/page-1/child-1/",
  },
  {
    "name": "Child 2",
    "url": "/page-1/child-1/child-2/",
  },
  {
    "name": "Child 3",
    "url": "/page-1/child-1/child-2/child-3/",
  }
]
and this is the result I want to output
<ul>
  <li>
    Page 1
    <ul>
      <li>
        Child 1
        <ul>
          <li>
            Child 2
            <ul>
              <li>
                Child 3
              </li>
            </ul>
          </li>
        </ul>
      </li>
    </ul>
  </li>
  <li>Page 2</li>
</ul>
This is what I have currently, which works, but I would like to find a better way to do it:
const generateSitemap = function (pages) {
let sitemap = "";
let organisedPages = [];
const sortPages = function (runs) {
if (pages.length === 0) return;
pages.forEach((page) => {
const title = page.name;
const url = page.url;
// Get homepage and content pages only
let pageObj = {
title: title,
url: url,
children: [],
};
// Handle top level pages first then build up children as you go deeper
if (pageLevel(url) === 1) {
organisedPages.push(pageObj);
pages = pages.filter((page1) => page !== page1);
} else if (runs === 2) {
organisedPages.forEach((oPage, i) => {
// Check to see if url is in segments and matches
let parseUrl = url.substring(1).slice(0, -1);
const urlParts = parseUrl.split("/");
const parentUrl = `/${urlParts.slice(0, -1).join("/")}/`;
if (oPage.url === parentUrl) {
organisedPages[i].children.push(pageObj);
pages = pages.filter(
(page1) => pageObj.url !== page1.systemProperties.url
);
return;
}
});
} else if (runs === 3) {
organisedPages.forEach((oPage, i) => {
// Check to see if url is in segments and matches
let parseUrl = url.substring(1).slice(0, -1);
const urlParts = parseUrl.split("/");
const parentUrl = urlParts.slice(0, -1);
const parentUrlComp = `/${parentUrl.join("/")}/`;
const parentUrl2 = parentUrl.slice(0, -1);
const parentUrl2Comp = `/${parentUrl2.join("/")}/`;
if (oPage.url === parentUrl2Comp) {
organisedPages[i].children.forEach((child, j) => {
if (child.url === parentUrlComp) {
organisedPages[i].children[j].children.push(pageObj);
pages = pages.filter(
(page1) => pageObj.url !== page1.systemProperties.url
);
return;
}
});
}
});
} else if (runs === 4) {
organisedPages.forEach((oPage, i) => {
// Check to see if url is in segments and matches
let parseUrl = url.substring(1).slice(0, -1);
const urlParts = parseUrl.split("/");
const parentUrl = urlParts.slice(0, -1);
const parentUrlComp = `/${parentUrl.join("/")}/`;
const parentUrl2 = parentUrl.slice(0, -1);
const parentUrl2Comp = `/${parentUrl2.join("/")}/`;
const parentUrl3 = parentUrl2.slice(0, -1);
const parentUrl3Comp = `/${parentUrl3.join("/")}/`;
if (oPage.url === parentUrl3Comp) {
organisedPages[i].children.forEach((child, j) => {
if (child.url === parentUrl2Comp) {
organisedPages[i].children[j].children.forEach((child1, k) => {
if (child1.url === parentUrlComp) {
organisedPages[i].children[j].children[k].children.push(
pageObj
);
pages = pages.filter(
(page1) => pageObj.url !== page1.systemProperties.url
);
return;
}
});
}
});
}
});
}
});
runs++;
if (runs < 5) {
sortPages(runs);
}
};
sortPages(1);
/**
* Check if page is a parent
*
* @param {string} url page url
* @returns {number} length of segments
*/
function pageLevel(url) {
// Remove first and last forward slash that is provided
let parseUrl = url.substring(1).slice(0, -1);
// Split parsed url by forward slash
const urlParts = parseUrl.split("/");
// Check segment length
return urlParts.length;
}
/**
* Loop through organised pages and set listing.
*/
organisedPages.forEach((page) => {
sitemap += `<li>`;
sitemap += `${page.title}`;
// Check if we need children loop for each parent page
if (page.children.length) {
sitemap += `<ul>`;
page.children.forEach((page) => {
sitemap += `<li>`;
sitemap += `${page.title}`;
// Check if we need children loop for each sub-child page
if (page.children.length) {
sitemap += `<ul>`;
page.children.forEach((page) => {
sitemap += `<li>`;
sitemap += `${page.title}`;
if (page.children.length) {
sitemap += `<ul>`;
page.children.forEach((page) => {
sitemap += `<li>`;
sitemap += `${page.title}`;
sitemap += `</li>`;
});
sitemap += `</ul>`;
}
sitemap += `</li>`;
});
sitemap += `</ul>`;
}
sitemap += `</li>`;
});
sitemap += `</ul>`;
}
sitemap += `</li>`;
});
return sitemap;
};
generateSitemap(pages)
I would choose to break the logical nesting of objects from the HTML formatting, as I think it makes for simpler functions all around. To comfortably do the nesting, I will also add a helper function to get the parent id from a url, so that, for instance,
getParentId ("/page-1/") //=> ""
getParentId ("/page-1/child-1/") //=> "/page-1"
getParentId ("/page-1/child-1/child-2/") //=> "/page-1/child-1"
// ... etc
We could easily inline this function in nest, but I think it's cleaner as a helper. There is a little bit of odd complexity with this, because there is an extra slash somewhere, beginning or end. We choose to slice off the final / when searching through our list to find children.
The code looks like this:
const getParentId = (url) => url .slice (0, url .slice (0, -1) .lastIndexOf ('/'))
const nest = (pages, parentId = "", id = parentId .slice (0, -1)) => pages
.filter (({url}) => getParentId (url) == id)
.map (({url, ...rest}) => ({...rest, url, children: nest (pages, url)}))
const format = (nodes) => `<ul>${nodes .map (({name, url, children}) =>
`<li>${name}${children .length ? format (children) : ''}</li>`
).join('')}</ul>`
const pages2html = (pages) => format (nest (pages))
const pages = [{name: "Page 1", url: "/page-1/"}, {name: "Page 2", url: "/page-2/"}, {name: "Child 1", url: "/page-1/child-1/"}, {name: "Child 2", url: "/page-1/child-1/child-2/"}, {name: "Child 3", url: "/page-1/child-1/child-2/child-3/"}]
console .log (pages2html (pages))
.as-console-wrapper {max-height: 100% !important; top: 0}
Here nest turns your nodes into a format like this:
[
  {
    name: "Page 1",
    url: "/page-1/",
    children: [
      {
        name: "Child 1",
        url: "/page-1/child-1/",
        children: [
          {
            name: "Child 2",
            url: "/page-1/child-1/child-2/",
            children: [
              {
                name: "Child 3",
                url: "/page-1/child-1/child-2/child-3/",
                children: []
              }
            ]
          }
        ]
      }
    ]
  },
  {
    name: "Page 2",
    url: "/page-2/",
    children: []
  }
]
and then format turns that into your HTML. We wrap them together in pages2html to have a single function to call, but the work is done in those two at-least-possibly reusable functions.
Note that we don't try to preserve the white-space in your requested format. We could do so, at the expense of making format quite a bit uglier. Here's a quick attempt, which I think is correct. But I wouldn't swear to it:
const format = (nodes, depth = 0) => `${depth > 0 ? '\n' : ''
}${' '.repeat (2 * depth) }<ul>${nodes .map (({name, url, children}) => `
${' ' .repeat (2 * depth + 1) }<li>
${' ' .repeat (2 * depth + 2) }${name}${
children .length ? `` + format (children, depth + 1) : ''
}
${' ' .repeat (2 * depth + 1) }</li>`
).join('')}
${' '.repeat (2 * depth)}</ul>`
We simply use a depth parameter to note the level of nesting, and use that to figure out how many spaces to use at the beginning of lines.
In general, I find this style, working as a sequence of transformations, much simpler to work with. Yes, we could do all this in a single function. It might even have fewer lines of code than my four separate functions. But each of these is simpler to understand, and simpler to change when the need arises.
And recursion will be more flexible and much simpler than your multi-level branching, as you can see from the fairly simple recursive code in nest.

How do I parse the indentation level of a string into a JSON Object?

I'd like to be able to parse a string into a JSON Object, something like this (the text can be anything, I'm just putting them like this so you can see the structure):
A
  A-A
  A-B
    A-B-A
    A-B-B
  A-C
    A-C-A
B
into a json object, structured like this:
[
  {
    "root": "A",
    "content": [
      { "root": "A-A", "content": [] },
      {
        "root": "A-B",
        "content": [
          { "root": "A-B-A", "content": [] },
          { "root": "A-B-B", "content": [] }
        ]
      },
      {
        "root": "A-C",
        "content": [
          { "root": "A-C-A", "content": [] }
        ]
      }
    ]
  },
  { "root": "B", "content": [] }
]
So far, I have the following, but I'm not sure if this is the best way of doing it. Maybe a recursive approach would be better?
let body = [];
let indentStack = [0];

for (let line of input.split('\n')) { // input is the string I'd like to parse
  if (line.trim() == '') continue; // skips over empty lines

  let indent = line.match(/^ +/);
  indent = indent ? indent[0].length : 0; // matches the first group of spaces with regex, gets the indent level of this line

  if (indentStack[indentStack.length - 1] != indent)
    if (indentStack.includes(indent)) indentStack.length = indentStack.indexOf(indent) + 1; // remove all indent levels after it as it's returned back to a higher level
    else indentStack.push(indent);

  console.log(`${(indent + '[' + indentStack.join() + ']').padEnd(10, ' ')}: ${line}`); // debugging

  if (indentStack.length == 1) body.push({ root: line, content: [] });
  else {
    body[body.length - 1].content.push({ root: line.substring(indent), content: [] })
  }
}
console.log(body)
I would do it this way:
const data =
`A
  A-A
  A-B
    A-B-A
    A-B-B
  A-C
    A-C-A
B`;
function doTree(data)
{
  let
    res = []
    , levels = [ res ]
    ;
  for (let line of data.split('\n'))
  {
    let
      level = line.search(/\S/) >> 1 // (index of first non whitespace char) / 2 --> IF indentation is 2 spaces
      , root = line.trim()
      , content = []
      ;
    if (!root) continue
    levels[level].push({root,content})
    levels[++level] = content
  }
  return res
}

console.log( doTree(data) )
.as-console-wrapper {max-height: 100%!important;top:0 }
About the question of indentation: with the version below you can have unequal indentation steps, either with spaces or with tabs (but do not mix spaces and tabs).
const data_023c = // indentation values are 0c, 2c, 3c
`A
  A-A
  A-B
   A-B-A
   A-B-B
  A-C
   A-C-A
B`;
const indentation = (() => // IIFE
{
  let
    indents = []
    , max = -1
    ;
  return {
    clear: () =>
    {
      indents.length = 0
      max = -1
    }
    , get: (line, lNum = '?') =>
    {
      let ncBefore = line.search(/\S/)
      let level = indents.indexOf(ncBefore)
      if (level === -1)
      {
        if (ncBefore < max) throw `error on indentation,\n line = ${lNum},\n line value is = "${line}"`
        level = indents.push( ncBefore ) - 1
        max = ncBefore
      }
      return level
    }
  }
})()
const doTree = data =>
{
  let
    res = []
    , levels = [ res ]
    , lineN = 0
    ;
  indentation.clear()
  for (let line of data.split('\n'))
  {
    lineN++ // line counter for indent error message
    let
      root = line.trim()
      , content = []
      ;
    if (!root) continue
    let level = indentation.get(line, lineN)
    levels[level].push({root,content})
    levels[++level] = content
  }
  return res
}

console.log( doTree(data_023c) )
.as-console-wrapper {max-height: 100%!important;top:0 }

Efficient way to delete from an array of objects in JavaScript

frontendtasks = [
  {"id": 1, "name": "User Deletion", "script": "UserDeletion"},
  {"id": 2, "name": "User Creation", "script_name": "UserCreation"}
]

backendtasks = [
  {"id": 1, "name": "User Deletion", "script": "UserDeletion_V2"}
]
I'm trying to delete the entry with id = 1 in frontendtasks and push the corresponding entry from backendtasks with this code.
if (backendtasks != 0) {
  for (updated_task in backendtasks) {
    for (oldtask in frontendtasks) {
      if (frontendtasks[oldtask].id == backendtasks[updated_task].id) {
        frontendtasks[oldtask] = backendtasks[updated_task]
        delete backendtasks[updated_task];
        break;
      }
    }
  }

  for (new_task in backendtasks) {
    frontendtasks.unshift(backendtasks[new_task])
  }
}
This is really slow and the CPU hits 100% in the browser with 700 items. Is there a more efficient way to implement this?
Don't loop through both arrays; instead, use an object to map backend ids to values:
const mappings = {};
for (const task of backendtasks) {
  mappings[task.id] = task;
}

for (let i = 0; i < frontendtasks.length; i++) {
  const curid = frontendtasks[i].id;
  if (curid in mappings) {
    frontendtasks[i] = mappings[curid];
    delete mappings[curid];
  }
}

// push is faster than unshift
for (const key in mappings) {
  frontendtasks.push(mappings[key]);
}
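A variant of the same idea using a Map instead of a plain object (a sketch, assuming the same frontendtasks / backendtasks shapes as above) -
const byId = new Map(backendtasks.map(t => [t.id, t]));

const merged = frontendtasks.map(t => {
  const hit = byId.get(t.id);
  if (hit) byId.delete(t.id); // consumed, so it isn't appended again below
  return hit || t;
});

merged.push(...byId.values()); // anything left in the Map is a new task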
Approach: Since you have 2 arrays, I would suggest first normalizing the backend array to an object, then iterating over the frontend array and looking up each id in the normalized object, since lookup in an object is O(1) compared to O(n) in an array.
function getFrontendTasks() {
  const frontendtasks = [
    {"id": 1, "name": "User Deletion", "script": "UserDeletion"},
    {"id": 2, "name": "User Creation", "script_name": "UserCreation"}
  ]

  const backendtasks = [
    {"id": 1, "name": "User Deletion", "script": "UserDeletion_V2"}
  ]

  const normalizedBackendTasks = backendtasks.reduce((acc, val) => ({...acc, [val.id]: val}), {});
  const newFrontendTasks = frontendtasks.map((task) => normalizedBackendTasks[task.id] || task);

  return newFrontendTasks
}

console.log(getFrontendTasks())
Creating a mapping table reduces the time complexity from O(n^2) to O(n) by removing the nested for loops, which are very expensive.
Try the following code:
const map = {};
backendtasks.forEach(bt => (map[bt.id] = bt));

frontendtasks.forEach((ft, idx) => {
  if (map[ft.id]) {
    frontendtasks[idx] = map[ft.id];
    delete map[ft.id];
  }
});

frontendtasks = frontendtasks.concat(Object.values(map));
Somehow I didn't see the map() function in any solution that creates a new array, as shown below.
This will return a new array with the new value. As you can see, it takes an array, an id (this could be anything and any type, though), and a callback.
It searches for the id in the array and runs the callback when found, which is an efficient way to do what you want.
In the callback, I used find() on backendtasks simply because I need to find the item which has the same id (id: 1).
When found, it returns the item from backendtasks, and that value becomes the element in the array returned by map().
So this should be close to O(n), considering that the callback only runs once, and it's a more elegant solution for multiple uses in my opinion.
const frontendtasks: any[] = [];
const backendtasks: any[] = [];

const fn = (arr: any[], id: number, callback: (removed: any) => any) => {
  return arr.map((ft) => {
    if (ft.id !== id) return ft;
    else return callback(ft);
  });
};

fn(frontendtasks, 1, (rm) => backendtasks.find((task) => task.id === rm.id));

Recursive function returning empty array

I am having an issue with my recursive function getPath, as it is returning an empty array when it should be returning an array that looks something like this:
['main', 'children', 'name']
I am not sure if the logic part is right, but that isn't what the question is about. The question is: why is my array empty? It is pushing data onto the array, but the final result is an empty array.
let dataScope = [{
  "name": "main",
  "location": [":data"]
}, {
  "name": "child",
  "location": ["main", "children"]
}]

function getLocation(key) {
  let val = dataScope.find(i => i.name == key)
  return val ? val.location : []
}

function getPath(items) {
  let path = []
  let item = items.shift()
  if (item) {
    let loc = getLocation(item)
    if (loc.length > 0 && loc.join('.') != ':data') {
      path.push(...getPath(loc))
      console.log('added to array')
    }
  }
  return path
}

console.log(getPath(['child', 'name']))
You don't do anything with loc, so it seems nothing gets pushed to the array.
Note: I'm still trying to get to grips with why your original code results in an empty array - however, this code produces the expected result :p
let dataScope = [{
  "name": "main",
  "location": [":data"]
}, {
  "name": "child",
  "location": ["main", "children"]
}]

function getLocation(key) {
  let val = dataScope.find(i => i.name == key);
  return val ? val.location : []
}

function getPath(items, indent = 0) {
  let z = items.join(',');
  console.log(`${' '.repeat(indent)}called with ${z}`);
  let path = [];
  let item = items.shift();
  let loc = [];
  if (item) {
    loc = getLocation(item);
    if (loc.length > 0 && loc.join('.') != ':data') {
      path.push(...getPath(loc.slice(), indent + 4)); // .slice() so loc isn't mutated
      console.log(`${' '.repeat(indent)}${z} has path [${path.join(',')}]`);
    }
    path.push(...loc); // add loc to the path - comment this out to see the difference
  }
  console.log(`${' '.repeat(indent)}${z} returns [${path.join(',')}]`);
  return path
}

console.log(`[${getPath(['child', 'name'])}]`)
First you're passing an array of names to getPath, but then later you're passing the location array. Which one should it be? The logic needs tweaking. Also, there's nothing in the dataset using the value "name", so your test is incorrect as well.
It's because your recursion passes the dataScope location, but you implemented getPath expecting dataScope keys:
let dataScope = [{
  "name": "main",
  "location": [":data"]
}, {
  "name": "child",
  "location": ["main", "children"]
}]

function getLocation(key) {
  let val = dataScope.find(i => i.name == key)
  return val ? val.location : []
}

function getPath(keys) { // changing items name to keys for clarification
  let path = []
  let key = keys.shift()
  if (key) {
    let loc = getLocation(key);
    if (loc.length > 0 && loc.join('.') != ':data') {
      path.push(...loc) // push locs into array
      getPath(keys) // call getPath with remaining keys
      console.log('added to array')
    }
  }
  return path
}

console.log(getPath(['child', 'main']))
You will not have :data in your path result because of this statement: loc.join('.') != ':data'. If you remove it, you will get your expected output.

Efficiently rename/re-map javascript/json object keys within array of objects

I have some structured JSON data like so. Let's assume this is interchangeable, via JSON.parse():
[
  {
    "title": "pineapple",
    "uid": "ab982d34c98f"
  },
  {
    "title": "carrots",
    "uid": "6f12e6ba45ec"
  }
]
I need it to look like this, remapping title to name and uid to id, with the result:
[
  {
    "name": "pineapple",
    "id": "ab982d34c98f"
  },
  {
    "name": "carrots",
    "id": "6f12e6ba45ec"
  }
]
The most obvious way of doing it is like this:
str = '[{"title": "pineapple","uid": "ab982d34c98f"},{"title": "carrots", "uid": "6f12e6ba45ec"}]';

var arr = JSON.parse(str);
for (var i = 0; i < arr.length; i++) {
  arr[i].name = arr[i].title;
  arr[i].id = arr[i].uid;
  delete arr[i].title;
  delete arr[i].uid;
}

$('body').append("<pre>" + JSON.stringify(arr, undefined, 4) + "</pre>");
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
...or using something more complex (albeit not more efficient) like this.
This is all fine and dandy, but what if there were 200,000 objects in the array? This is a lot of processing overhead.
Is there a more efficient way to remap a key name? Possibly without looping through the entire array of objects? If your method is more efficient, please provide proof/references.
As I already mentioned in the comments, if you can make certain assumptions about the values of the objects, you could use a regular expression to replace the keys, for example:
str = str.replace(/"title":/g, '"name":');
It's not as "clean", but it might get the job done faster.
If you have to parse the JSON anyway, a more structured approach would be to pass a reviver function to JSON.parse, and you might be able to avoid an additional pass over the array. This probably depends on how engines implement JSON.parse though (maybe they parse the whole string first and then make a second pass with the reviver function, in which case you wouldn't get any advantage).
var arr = JSON.parse(str, function(prop, value) {
  switch(prop) {
    case "title":
      this.name = value;
      return;
    case "uid":
      this.id = value;
      return;
    default:
      return value;
  }
});
Benchmarks, using the Node.js script below to test 3 times:
1389822740739: Beginning regex rename test
1389822740761: Regex rename complete
// 22ms, 22ms, 21ms
1389822740762: Beginning parse and remap in for loop test
1389822740831: For loop remap complete
// 69ms, 68ms, 68ms
1389822740831: Beginning reviver function test
1389822740893: Reviver function complete
// 62ms, 61ms, 60ms
It appears as if the regex (in this case) is the most efficient, but be careful when trying to parse JSON with regular expressions.
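A couple of hypothetical strings showing why the regex rewrite is fragile -
const spaced = '[{"title" : "pineapple"}]';      // extra whitespace before the colon defeats /"title":/
const nested = '[{"meta": {"title": "inner"}}]'; // a nested "title" key gets renamed too, wanted or not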
Test script, loading 100,230 lines of the OP's sample JSON:
fs = require('fs');

fs.readFile('test.json', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }

  console.log(new Date().getTime() + ": Beginning regex rename test");
  var str = data.replace(/"title":/g, '"name":');
  str = str.replace(/"uid":/g, '"id":');
  JSON.parse(str);
  console.log(new Date().getTime() + ": Regex rename complete");

  console.log(new Date().getTime() + ": Beginning parse and remap in for loop test");
  var arr = JSON.parse(data);
  for (var i = 0; i < arr.length; i++) {
    arr[i].name = arr[i].title;
    arr[i].id = arr[i].uid;
    delete arr[i].title;
    delete arr[i].uid;
  }
  console.log(new Date().getTime() + ": For loop remap complete");

  console.log(new Date().getTime() + ": Beginning reviver function test");
  var arr = JSON.parse(data, function (prop, value) {
    switch (prop) {
      case "title":
        this.name = value;
        return;
      case "uid":
        this.id = value;
        return;
      default:
        return value;
    }
  });
  console.log(new Date().getTime() + ": Reviver function complete");
});
I asked this question a long time ago, and since then I've grown accustomed to using Array.prototype.map() to get the job done, more for stability and cleanliness of code than performance. While it's certainly not the most performant, it looks great:
var repl = orig.map(function(obj) {
  return {
    name: obj.title,
    id: obj.uid
  }
})
If you need a more flexible (and ES6-compatible) function, try:
let replaceKeyInObjectArray = (a, r) =>
  a.map(o =>
    Object.keys(o)
      .map((key) => ({ [r[key] || key]: o[key] }))
      .reduce((a, b) => Object.assign({}, a, b))
  )
e.g.
const arr = [{ abc: 1, def: 40, xyz: 50 }, { abc: 1, def: 40, xyz: 50 }, { abc: 1, def: 40, xyz: 50 }]
const replaceMap = { "abc": "yyj" }

replaceKeyInObjectArray(arr, replaceMap)
/*
[
  {
    "yyj": 1,
    "def": 40,
    "xyz": 50
  },
  {
    "yyj": 1,
    "def": 40,
    "xyz": 50
  },
  {
    "yyj": 1,
    "def": 40,
    "xyz": 50
  }
]
*/
Here's another take on the OP's suggestion to use map() for clarity (not performance).
var newItems = items.map(item => ({
  name: item.title,
  id: item.uid
}));
This uses ES6 arrow functions and the shortcut syntaxes that are possible when there's only one parameter passed to the function and only one statement in the body of the function.
Depending on your history with lambda expressions in various languages, this form may or may not resonate with you.
Be careful when returning an object literal in the arrow function shortcut syntax like this. Don't forget the additional parens around the object literal!
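A quick illustration of that pitfall (hypothetical item shape) -
var withoutParens = items.map(item => { name: item.title });  // body is parsed as a block, so every element is undefined
var withParens    = items.map(item => ({ name: item.title })); // parens make it an object literal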
If you want to make it a little more reusable, maybe this is a decent approach:
function rekey(arr, lookup) {
  for (var i = 0; i < arr.length; i++) {
    var obj = arr[i];

    for (var fromKey in lookup) {
      var toKey = lookup[fromKey];
      var value = obj[fromKey];

      if (value) {
        obj[toKey] = value;
        delete obj[fromKey];
      }
    }
  }
  return arr;
}

var arr = [{ apple: 'bar' }, { apple: 'foo' }];
var converted = rekey(arr, { apple: 'kung' });

console.log(converted);
Using ES6:
const renameFieldInArrayOfObjects = (arr, oldField, newField) => {
  return arr.map(s => {
    return Object.keys(s).reduce((prev, next) => {
      if (next === oldField) {
        prev[newField] = s[next]
      } else {
        prev[next] = s[next]
      }
      return prev
    }, {})
  })
}
Using ES7:
const renameFieldInArrayOfObjects = (arr, oldField, newField) => {
  return arr.map(s => {
    return Object.keys(s).reduce((prev, next) => {
      return next === oldField
        ? {...prev, [newField]: s[next]}
        : {...prev, [next]: s[next]}
    }, {})
  })
}
You can use an npm package named node-json-transform.
Your data:
const data = [
  {
    title: 'pineapple',
    uid: 'ab982d34c98f',
  },
  {
    title: 'carrots',
    uid: '6f12e6ba45ec',
  },
];
Your mapping:
const map = {
  item: {
    name: 'title',
    id: 'uid',
  },
};
And use the package:
const DataTransform = require("node-json-transform").DataTransform;
const dataTransform = DataTransform(data, map);
const result = dataTransform.transform();
console.log(result);
Result:
[
  {
    name: 'pineapple',
    id: 'ab982d34c98f'
  },
  {
    name: 'carrots',
    id: '6f12e6ba45ec'
  }
]
Maybe it's not the best way for performance, but it's quite elegant.
var jsonObj = [/*sample array in question*/ ]
Based on the different benchmarks discussed below, the fastest solution is a native for loop:
var arr = [];
for (var i = 0, len = jsonObj.length; i < len; i++) {
  arr.push({ "name": jsonObj[i].title, "id": jsonObj[i].uid });
}
Alternatively, without using a framework, this would be option 2:
var arr = []
jsonObj.forEach(function(item) { arr.push({"name": item.title, "id" : item.uid }); });
There is always debate between using native and non-native functions. If I remember correctly, lodash argued it was faster than underscore because it used non-native functions for key operations.
However, different browsers will sometimes produce very different results. I always look for the best average.
For benchmarks you can take a look at this:
http://jsperf.com/lo-dash-v1-1-1-vs-underscore-v1-4-4/8
function replaceElem(value, replace, str) {
  while (str.indexOf(value) > -1) {
    str = str.replace(value, replace);
  }
  return str;
}
Call this from main:
var value = "title";
var replace = "name";
replaceElem(value, replace, str);
