How to convert an array of arrays to an object in JavaScript

I need a solution for the general case. For example:
let data = [['a', 'b'],['c', 'd'],['e', 'f', 'g', 'h']];
I need this:
{
  "a": {
    "c": { "e": 0, "f": 0, "g": 0, "h": 0 },
    "d": { "e": 0, "f": 0, "g": 0, "h": 0 }
  },
  "b": {
    "c": { "e": 0, "f": 0, "g": 0, "h": 0 },
    "d": { "e": 0, "f": 0, "g": 0, "h": 0 }
  }
}
The data can be any array of arrays. I tried a recursive approach but got stuck with Map and the Object.fromEntries method.

Simple recursion:
Base case - we only have one array inside the array. We construct an object with default values from it.
Recursive step - we have more arrays. We build up an object and each key comes from the first array, each value is a recursive call that uses the rest of the arrays:
const buildObj = (data, defaultValue = 0) => {
  if (data.length > 1)
    return Object.fromEntries(
      data[0].map(x => [x, buildObj(data.slice(1), defaultValue)])
    );
  return Object.fromEntries(
    data[0].map(x => [x, defaultValue])
  );
}
console.log(buildObj([
['e', 'f', 'g', 'h']
]));
console.log(buildObj([
['c', 'd'],
['e', 'f', 'g', 'h']
]));
console.log(buildObj([
['a', 'b'],
['c', 'd'],
['e', 'f', 'g', 'h']
]));
console.log(buildObj([
['a', 'b'],
['c', 'd'],
['e', 'f', 'g', 'h']
], 42)); //different default
Can also be represented by:
Base case - return the default value.
Recursive step - We build up an object and each key comes from the first array, each value is a recursive call that uses the rest of the arrays.
const buildObj = (data, defaultValue = 0) => {
  if (data.length !== 0)
    return Object.fromEntries(
      data[0].map(x => [x, buildObj(data.slice(1), defaultValue)])
    );
  return defaultValue;
}
console.log(buildObj([
['e', 'f', 'g', 'h']
]));
console.log(buildObj([
['c', 'd'],
['e', 'f', 'g', 'h']
]));
console.log(buildObj([
['a', 'b'],
['c', 'd'],
['e', 'f', 'g', 'h']
]));
console.log(buildObj([
['a', 'b'],
['c', 'd'],
['e', 'f', 'g', 'h']
], 42)); //different default
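For reference, the same nesting can also be built without explicit recursion by folding the arrays from right to left. A minimal sketch (my addition, not part of the original answers; buildObjIter is a made-up name):
// Fold from the right: start with the default value and wrap it level by level.
// Note: unlike the recursive version, sibling keys here share the same child
// object reference, which only matters if you mutate the result afterwards.
const buildObjIter = (data, defaultValue = 0) =>
  data.reduceRight(
    (child, keys) => Object.fromEntries(keys.map(k => [k, child])),
    defaultValue
  );
console.log(buildObjIter([['a', 'b'], ['c', 'd'], ['e', 'f', 'g', 'h']]));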

I think this works well. It runs the recursion with a depth parameter. You can change forEach to a for loop if you prefer.
let data = [['a', 'b'],['c', 'd'],['e', 'f', 'g', 'h']];
const arrToDict = (data) => {
  const recursive = (depth = 0) => {
    let dict = {}
    if (data.length === depth + 1) {
      data[depth].forEach(el => {
        dict[el] = 0
      })
    } else {
      data[depth].forEach(el => {
        dict[el] = recursive(depth + 1)
      })
    }
    return dict
  }
  return recursive();
}
arrToDict(data)
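Note that this final call discards its return value; outside a console that echoes expressions, you would log it, for example:
console.log(JSON.stringify(arrToDict(data), null, 2));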

I have used recursion to convert each index into an object, plus memoization for a more efficient solution.
Base case: the index has gone out of bounds of the array.
Recursive case: recurse over the next index and assign the resulting object as the value of the current key.
// store the index which is already iterated/objectified
// this is just for LESS COMPUTATION and MORE EFFICIENCY
const memoizeObjectifiedIndex = {};
let data = [['a', 'b'], ['c', 'd'], ['e', 'f', 'g', 'h']];

// basic recursive approach
function createObject(data, index) {
  // base case:
  // the index is just outside the length of the array
  // (here index === 3, since the array is 0-indexed and the last index is 2)
  if (index === data.length) return 0;
  // check the memoized object to see if the current index is already objectified
  if (memoizeObjectifiedIndex[index]) {
    // you can log here to see when this condition is true and computation is saved
    // console.log("Found for index ", index);
    return memoizeObjectifiedIndex[index];
  }
  const obj = {};
  data[index].forEach((key) => {
    // assign the next objectified index as the value of the current key
    obj[key] = createObject(data, index + 1);
  });
  // store the object for the current index for future use
  memoizeObjectifiedIndex[index] = obj;
  return obj;
}
console.log(createObject(data, 0))
Note: you can see the output better by copying this code and running it in the browser console.
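One caveat worth knowing about this memoized version (my observation, not part of the original answer): every key at a given depth receives the same cached object, so the subtrees share references and mutating one branch affects the others:
// Assumes createObject and data from the snippet above.
const out = createObject(data, 0);
out.a.c.e = 42;
console.log(out.b.d.e); // also 42, because out.a.c and out.b.d are the same cached object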

Related

Can't find the longest occurrence in a set of duplicates

The goal is to detect the longest common sequence in a set of arrays. In this case it should be 'bal' but I get 'lbal'.
const arr = [
  ['g', 'l', 'o', 'b', 'a', 'l'],
  ['b', 'a', 'l', 'l']
]
const res = arr.reduce((acc, val) => acc.filter(elem => val.includes(elem)))
This just gives ALL the common items (items present in every array), when I need only the longest duplicated sequence. Any suggestions?
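For what it's worth, the answers below count how often values occur rather than returning the longest run itself. A brute-force sketch of one way to get the longest common contiguous run, assuming that is what is meant (longestCommonRun is my name, not from the question):
// Brute force: take every contiguous slice of the first array and keep the
// longest one that appears as a contiguous run in all the other arrays.
const longestCommonRun = (arrays) => {
  const [first, ...rest] = arrays;
  const contains = (arr, slice) =>
    arr.some((_, i) => slice.every((v, j) => arr[i + j] === v));
  let best = [];
  for (let start = 0; start < first.length; start++) {
    for (let end = start + 1; end <= first.length; end++) {
      const slice = first.slice(start, end);
      if (slice.length > best.length && rest.every(a => contains(a, slice))) {
        best = slice;
      }
    }
  }
  return best;
};
console.log(longestCommonRun([
  ['g', 'l', 'o', 'b', 'a', 'l'],
  ['b', 'a', 'l', 'l']
]).join('')); // "bal"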
You can create an object that counts how many times each element is present in the array of arrays, like this:
const arr = [
  ['a', 'b', 'm'],
  ['g', 'o', 'a', 'b'],
  ['w', 'o', 'u', 'k', 'a', 'b']
]
const countTimes = data => data.flat().reduce((res, v) => {
  return {
    ...res,
    [v]: (res[v] || 0) + 1
  }
}, {})
const duplicates = data => Object.entries(countTimes(data))
  .filter(([v, n]) => n > 1)
  .map(([v, n]) => v)
console.log(countTimes(arr))
console.log(duplicates(arr))
const original = [
  ['a', 'b', 'm'],
  ['g', 'o', 'a', 'b'],
  ['w', 'o', 'u', 'k', 'a', 'b']
]
// The easiest approach is to split values into uniques and duplicates
let uniqueValues = []
let duplicates = []
// Now loop over every array
original.forEach((arr) => {
  // Loop over every value inside the array
  arr.forEach((value) => {
    // Check whether we have seen this value already
    if (!uniqueValues.includes(value)) {
      uniqueValues.push(value)
    } else {
      duplicates.push(value)
    }
  })
})
console.log('Duplicates: ', duplicates)
// If you want to remove the duplicates from the duplicates themselves, use a Set
let uniqueDuplicates = [...new Set(duplicates)]
console.log('Unique duplicates: ', uniqueDuplicates)
If you don't need to know which array the duplicates are in, you can use Array.prototype.flat() to get rid of the nesting, then check the flat array for duplicates.
const arr = [
  ['a', 'b', 'm'],
  ['g', 'o', 'a', 'b'],
  ['w', 'o', 'u', 'k', 'a', 'b']
]
const arr2 = arr.flat() // ['a', 'b', 'm', 'g', 'o', 'a', 'b', 'w', 'o', 'u', 'k', 'a', 'b']
const hasDuplicates = new Set(arr2).size !== arr2.length
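If you also want the duplicated values themselves rather than just a boolean, a possible follow-up on the same flattened array (my addition, not part of the original answer):
// Keep values that are not at their first occurrence, then dedupe them.
const duplicateValues = [...new Set(arr2.filter((v, i) => arr2.indexOf(v) !== i))]
console.log(duplicateValues) // ['a', 'b', 'o']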
You can create a tally to find how many times each element appears globally, using a nested forEach:
function findDuplicates(data) {
  const map = {};
  data.forEach((row) => {
    row.forEach((item) => {
      if (!map[item]) {
        map[item] = 1;
      } else {
        map[item]++;
      }
    });
  });
  return map;
}
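The function returns the raw counts; a possible usage sketch for pulling out the duplicated keys (my addition, using the arrays from the examples above):
const counts = findDuplicates([
  ['a', 'b', 'm'],
  ['g', 'o', 'a', 'b'],
  ['w', 'o', 'u', 'k', 'a', 'b']
]);
console.log(counts); // { a: 3, b: 3, m: 1, g: 1, o: 2, w: 1, u: 1, k: 1 }
console.log(Object.keys(counts).filter(k => counts[k] > 1)); // ['a', 'b', 'o']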
You can create a simple counter object (if you know Python, this is similar to collections.Counter):
class Counter extends Map {
  update(values) {
    for (let val of values)
      this.set(val, 1 + (this.get(val) ?? 0))
  }
}

const ARR = [
  ['a', 'b', 'm'],
  ['g', 'o', 'a', 'b'],
  ['w', 'o', 'u', 'k', 'a', 'b']
]

const tally = new Counter()
for (let subArray of ARR)
  tally.update(new Set(subArray))

for (let [element, count] of tally)
  if (count === ARR.length)
    console.log(element)
count === yourArrayOfArrays.length selects elements that appear in all the arrays; you can replace it with count > 1 to find any duplicates.
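For example, the count > 1 variant would look like this (a sketch reusing the tally built above):
for (let [element, count] of tally)
  if (count > 1)
    console.log(element) // logs a, b, o for the sample data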

Given a list of links, find all link chains with length greater than one

Given a table of links of the form:
const links = [
['a', 'b'],
['c', 'd'],
['e', 'f'],
['b', 'x'],
['x', 'z']
];
where the first column is unique i.e. links[0...length][0] are all unique.
I would like to find all connections greater than 1. In the example above, the output should be
[["a", "b"], ["b", "x"], ["x", "z"]]
Here is my attempt, which is based on a similar question for Java:
const links = [
['a', 'b'],
['c', 'd'],
['e', 'f'],
['b', 'x'],
['x', 'z']
];
connections = [];
const map = new Map()
const recurse = (value, key) => {
  if (map.has(value)) {
    if (key !== undefined) {
      connections.push([key, map.get(key)]);
    }
    connections.push([value, map.get(value)])
    recurse(map.get(value))
  }
}
const findConnectionsWithMap = arr => {
  arr.forEach(value => map.set(value[0], value[1]))
  const keySet = map.keys();
  //console.log(keySet);
  while (key = keySet.next().value) {
    recurse(map.get(key), key);
  }
  console.log(connections);
}
findConnectionsWithMap(links);
Current output is
[["a", "b"], ["b", "x"], ["x", "z"], ["b", "x"], ["x", "z"]]
I am not sure why the output has duplicates, but I am guessing it has to do with the recursion.
Why is this happening?
And is this the best approach for a large record set?
You could use recursion and call the recursive method as long as a link is found between two elements. To keep the chains unique you also have to remove an element once it belongs to some chain; otherwise, for your example data, you would actually get two chains: [["a","b"],["b","x"],["x","z"]] and [["b","x"],["x","z"]].
const links = [
['a', 'b'],
['c', 'd'],
['e', 'f'],
['b', 'x'],
['x', 'z']
];
function getConnections(data) {
  function connect(a, data) {
    const chain = []
    const index = data.findIndex((b) => a[1] === b[0])
    if (index > -1) {
      chain.push(data[index], ...connect(data[index], data))
      data.splice(index, 1)
    }
    return chain
  }
  return data.reduce((r, e) => {
    const chain = connect(e, data)
    if (chain.length) {
      r.push([e, ...chain])
    }
    return r
  }, [])
}
console.log(JSON.stringify(getConnections(links)))
Based on my recent answer about detecting cyclical paths in an array (much like this one), here is a solution.
This one is without recursion, just looping over all items and finding chains. It assumes only 1-to-1 links; if that is not the case, let me know in the comments.
const links = [
['a', 'b'],
['c', 'd'],
['e', 'f'],
['b', 'x'],
['x', 'z']
];
function search_id(arr, id) {
  return arr.find(item => item[0] === id)
}

function find_paths(arr) {
  var result = []
  for (var i = 0; i < arr.length; i++) {
    var item = arr[i];
    var seen = [item[0]]
    while (true) {
      var target = search_id(arr, item[1]);
      if (target === undefined) {
        break;
      }
      if (seen.indexOf(target[0]) > -1) {
        // cyclical
        console.error("cyclical chain found")
        break;
      }
      seen.push(target[0]);
      item = target;
    }
    if (seen.length > 1) {
      result.push(seen)
    }
  }
  return result;
}
console.log(find_paths(links));

Modify object containing array based on an array which contains an object

Here are the variables:
let linked = {
  related: [
    [0, 'a', 'b'],
    [0, 'c', 'd', 'e', 'f', 'g'],
    [0, "s"],
    [0, 'd'],
    [0, 'g', 'n', 'h']
  ]
}
let hold = [{
    0: 4, // 0 represents [0,'a','b']
    1: 3  // 1 represents [0,'c','d','e','f','g']
  },
  {
    3: 2, // 3 represents [0,'d']
    4: 6  // 4 represents [0,'g','n','h']
  }
];
The hold array contains two objects, and each object's key is an index into linked.related.
The problem is that I want to add the value of each hold property to the first element of the corresponding linked.related entry.
So the result should be:
let linked = {
  related: [
    [4, 'a', 'b'],
    [3, 'c', 'd', 'e', 'f', 'g'],
    [0, "s"],
    [2, 'd'],
    [6, 'g', 'n', 'h']
  ]
}
So I want to sum the values of hold into the first element of each linked.related entry.
You can do it with two nested forEach loops:
hold.forEach(x => {
  Object.keys(x).forEach(y => {
    linked.related[y][0] += x[y]
  });
});

let linked = {
  related: [
    [0, 'a', 'b'],
    [0, 'c', 'd', 'e', 'f', 'g'],
    [0, "s"],
    [0, 'd'],
    [0, 'g', 'n', 'h']
  ]
}
let hold = [{
    0: 4,
    1: 3
  },
  {
    3: 2,
    4: 6
  }
];
hold.forEach(x => {
  Object.keys(x).forEach(y => {
    linked.related[y][0] += x[y]
  });
});
console.log(linked.related);
You could iterate hold and get the entries for the update.
var linked = { related: [[0, 'a', 'b'], [0, 'c', 'd', 'e', 'f', 'g'], [0, "s"], [0, 'd'], [0, 'g', 'n', 'h']] },
hold = [{ 0: 4, 1: 3 }, { 3: 2, 4: 6 }];
hold.forEach(o => Object.entries(o).forEach(([i, v]) => linked.related[i][0] += v));
console.log(linked);
You can use forEach and Object.entries
let linked = {related: [[0, 'a', 'b'],[0, 'c', 'd', 'e', 'f', 'g'],[0, "s"],[0, 'd'],[0, 'g', 'n', 'h']]}
let hold =[{0: 4, 1:3},{3: 2, 4:6}]
hold.forEach(v => {
  Object.entries(v).forEach(([k, v]) => {
    linked.related[k][0] += v
  })
})
console.log(linked)

Merge arrays and keep ordering

NOTE
The question has been edited following the good advice from #Kaddath to highlight that the ordering doesn't have to be alphabetical but depends on the position of items inside the arrays.
I have an array of arrays where each of the arrays are based on a given ordering but they can differ a bit.
For example, the base ordering is X -> D -> H -> B and here is my array of arrays:
const arrays = [
['X', 'D', 'H', 'B'],
['X', 'D', 'K', 'Z', 'H', 'B', 'A'],
['X', 'M', 'D', 'H', 'B'],
['X', 'H', 'T'],
['X', 'D', 'H', 'B']
]
I would like to merge all arrays into a single one and remove duplicates, while keeping the ordering. In my example the result would be ['X', 'M', 'D', 'K', 'Z', 'H', 'T', 'B', 'A'].
In the example we can see that M is between X and D inside the third array and it is so placed between X and D in the final output.
I know conflicts may arise, but here are the rules:
Every item should appear in the final output.
If an item appears in multiple arrays at different positions, the first appearance is the right one (skip the others).
What I've done so far is merging all of these arrays into a single one by using
const merged = [].concat.apply([], arrays);
(cf. https://stackoverflow.com/a/10865042/3520621).
And then getting unique values by using this code snippet from https://stackoverflow.com/a/1584377/3520621 :
Array.prototype.unique = function() {
  var a = this.concat();
  for (var i = 0; i < a.length; ++i) {
    for (var j = i + 1; j < a.length; ++j) {
      if (a[i] === a[j])
        a.splice(j--, 1);
    }
  }
  return a;
};
const finalArray = merged.unique();
But my result is this:
["X", "D", "H", "B", "K", "Z", "A", "M", "T"]
Any help is welcome!
Thanks.
const arrays = [
['X', 'D', 'H', 'B'],
['X', 'D', 'K', 'Z', 'H', 'B', 'A'],
['X', 'M', 'D', 'H', 'B'],
['X', 'H', 'T'],
['X', 'D', 'H', 'B']
];
const result = [];
arrays.forEach(array => {
  array.forEach((item, idx) => {
    // check if the item has already been added; if not, try to add it
    if (!~result.indexOf(item)) {
      // if the item is not the first item, find the position of its left sibling in the result array
      if (idx) {
        const result_idx = result.indexOf(array[idx - 1]);
        // add the item right after its left sibling's position
        result.splice(result_idx + 1, 0, item);
        return;
      }
      result.push(item);
    }
  });
});
console.log('expected result', ['X', 'M', 'D', 'K', 'Z', 'H', 'T', 'B', 'A'].join(','));
console.log(' current result', result.join(','));
Every array is in fact a set of rules that tells us the relative order between elements. The final list should contain all elements while respecting the relative order defined by all the rules.
Some solutions solve the initial request, and some don't even do that (all the ones that suggest using sort kind of missed the point of the question). Nevertheless, none proposes a generic solution.
The problem
If we look at the problem asked in the OP, this is how the rules define what is the relative position between elements:
   M    K -> Z    T
  ^ \  ^      \  ^
 /   v/        v/
X -> D ------> H -> B -> A
So, it is easy to see that our array starts with X. The next element can be either D or M, but D requires M to already be in the array. That is why we put M next, and then D. Next, D points to both K and H. But since H has other predecessors that are not collected yet, and K has none (actually it has D, but that is already collected in the list), we put K and Z, and only then H.
H points to both T and B. It actually doesn't matter which one we put first. So, last three elements can be in any of the following three orders:
T, B, A
B, A, T
B, T, A
Let us also take into account a little bit more complex case. Here are the rules:
['10', '11', '12', '1', '2'],
['11', '12', '13', '2'],
['9', '13'],
['9', '10'],
If we draw the graph using those rules, we get the following:
   ---------------> 13 ----
  /                 ^      \
 /                 /        v
9 -> 10 -> 11 -> 12 -> 1 -> 2
What is specific about this case? Two things:
Only in the last rule do we "find out" that 9 is the beginning of the array.
There are two indirect paths from 12 to 2 (one via 1, the other via 13).
Solution
My idea is to create a node for each element and use it to keep track of all immediate successors and immediate predecessors. After that, we find all elements that don't have predecessors and start "collecting" results from there. If we come to a node that has multiple predecessors and some of them are not yet collected, we stop the recursion there. It can also happen that a successor has already been collected along some other path; we skip that successor.
function mergeAndMaintainRelativeOrder(arrays/*: string[][]*/)/*: string[]*/ {
  /*
  interface NodeElement {
    value: string;
    predecessor: Set<NodeElement>;
    successor: Set<NodeElement>;
    collected: boolean;
  }
  */
  const elements/*: { [key: string]: NodeElement }*/ = {};
  // For every element in all rules create a NodeElement that will
  // be used to keep track of immediate predecessors and successors
  arrays.flat().forEach(
    (value) =>
      (elements[value] = {
        value,
        predecessor: new Set/*<NodeElement>*/(),
        successor: new Set/*<NodeElement>*/(),
        // Used when we form the final array of results to indicate
        // that this node has already been collected in the final array
        collected: false,
      }),
  );
  arrays.forEach((list) => {
    for (let i = 0; i < list.length - 1; i += 1) {
      const node = elements[list[i]];
      const nextNode = elements[list[i + 1]];
      node.successor.add(nextNode);
      nextNode.predecessor.add(node);
    }
  });

  function addElementsInArray(head/*: NodeElement*/, array/*: string[]*/) {
    let areAllPredecessorsCollected = true;
    head.predecessor.forEach((element) => {
      if (!element.collected) {
        areAllPredecessorsCollected = false;
      }
    });
    if (!areAllPredecessorsCollected) {
      return;
    }
    array.push(head.value);
    head.collected = true;
    head.successor.forEach((element) => {
      if (!element.collected) {
        addElementsInArray(element, array);
      }
    });
  }

  const results/*: string[]*/ = [];
  Object.values(elements)
    .filter((element) => element.predecessor.size === 0)
    .forEach((head) => {
      addElementsInArray(head, results);
    });
  return results;
}
console.log(mergeAndMaintainRelativeOrder([
['X', 'D', 'H', 'B'],
['X', 'D', 'K', 'Z', 'H', 'B', 'A'],
['X', 'M', 'D', 'H', 'B'],
['X', 'H', 'T'],
['X', 'D', 'H', 'B'],
]));
console.log(mergeAndMaintainRelativeOrder([
['10', '11', '12', '1', '2'],
['11', '12', '13', '2'],
['9', '13'],
['9', '10'],
]));
Big O
If we say that n is the number of rules and m is the number of elements in each rule, the complexity of this algorithm is O(n*m). This assumes that the JS Set operations are close to O(1).
Flatten, remove duplicates and sort could be simpler:
const arrays = [
['A', 'B', 'C', 'D'],
['A', 'B', 'B-bis', 'B-ter', 'C', 'D', 'D-bis'],
['A', 'A-bis', 'B', 'C', 'D'],
['A', 'C', 'E'],
['A', 'B', 'C', 'D'],
];
console.log(
arrays
.flat()
.filter((u, i, all) => all.indexOf(u) === i)
.sort((a, b) => a.localeCompare(b)),
);
Or even simpler, according to Mohammad Usman's now-deleted post:
const arrays = [
['A', 'B', 'C', 'D'],
['A', 'B', 'B-bis', 'B-ter', 'C', 'D', 'D-bis'],
['A', 'A-bis', 'B', 'C', 'D'],
['A', 'C', 'E'],
['A', 'B', 'C', 'D'],
];
console.log(
[...new Set([].concat(...arrays))].sort((a, b) =>
a.localeCompare(b),
),
);
You can use .concat() with Set to get the resultant array of unique values:
const data = [
['A', 'B', 'C', 'D'],
['A', 'B', 'B-bis', 'B-ter', 'C', 'D', 'D-bis'],
['A', 'A-bis', 'B', 'C', 'D'],
['A', 'C', 'E'],
['A', 'B', 'C', 'D']
];
const result = [...new Set([].concat(...data))].sort((a, b) => a.localeCompare(b));
console.log(result);
Create a single array using Array#concat, then use a Set to get the unique values from it, then sort the array.
const arrays = [ ['A', 'B', 'C', 'D'], ['A', 'B', 'B-bis', 'B-ter', 'C', 'D', 'D-bis'], ['A', 'A-bis', 'B', 'C', 'D'], ['A', 'C', 'E'], ['A', 'B', 'C', 'D'] ],
result = [...new Set([].concat(...arrays))].sort();
console.log(result);
merge [].concat.apply([], arrays)
find uniq [...new Set(merged)]
sort .sort()
const arrays = [
['A', 'B', 'C', 'D'],
['A', 'B', 'B-bis', 'B-ter', 'C', 'D', 'D-bis'],
['A', 'A-bis', 'B', 'C', 'D'],
['A', 'C', 'E'],
['A', 'B', 'C', 'D']
];
let merged = [].concat.apply([], arrays); // merge array
let sort = [...new Set(merged)].sort(); // find uniq then sort
console.log(sort);
Fun problem to solve; I think I only partly succeeded.
I ignored the "underspecified" example of B -> A -> T vs T -> B -> A.
It's very inefficient.
Still posting because I think it might help you get things right. Here's my approach:
Step 1: create a naive index
We're creating an object that, for each unique element in the nested arrays, tracks which elements it has succeeded or preceded:
{
  "X": { prev: Set({}),      next: Set({ "D", "H", "B", "K", "Z", "A", "M", "T" }) },
  "M": { prev: Set({ "X" }), next: Set({ "D", "H", "B" }) },
  // etc.
}
I named it "naive" because these Sets only contain information one level deep.
I.e., they only report relations between elements that were in the same array. They cannot see that M comes before K, because those two were never in the same array.
Step 2: join the indexes recursively
This is where I ignored all big-O concerns one might have 😉. I merge the index recursively: the next of M is a join of the next of D, H, B. Recurse until you find an element that has no next, i.e. T or A.
Step 3: create a sorter that respects the sort index:
const indexSorter = idx => (a, b) =>
  idx[a].next.has(b) || idx[b].prev.has(a) ? -1 :
  idx[a].prev.has(b) || idx[b].next.has(a) ?  1 :
  0;
This function creates a sort method that uses the generated index to look up the sort order between any two elements.
Bringing it all together:
(function() {
  const naiveSortIndex = xss => xss
    .map(xs =>
      // [ prev, cur, next ]
      xs.map((x, i, xs) => [
        xs.slice(0, i), x, xs.slice(i + 1)
      ])
    )
    // flatten
    .reduce((xs, ys) => xs.concat(ys), [])
    // add to index
    .reduce(
      (idx, [prev, cur, next]) => {
        if (!idx[cur])
          idx[cur] = {
            prev: new Set(),
            next: new Set()
          };
        prev.forEach(p => {
          idx[cur].prev.add(p);
        });
        next.forEach(n => {
          idx[cur].next.add(n);
        });
        return idx;
      }, {}
    );

  const expensiveSortIndex = xss => {
    const naive = naiveSortIndex(xss);
    return Object
      .keys(naive)
      .reduce(
        (idx, k) => Object.assign(idx, {
          [k]: {
            prev: mergeDir("prev", naive, k),
            next: mergeDir("next", naive, k)
          }
        }), {}
      )
  }

  const mergeDir = (dir, idx, k, s = new Set()) =>
    idx[k][dir].size === 0
      ? s
      : Array.from(idx[k][dir])
          .reduce(
            (s, k2) => mergeDir(dir, idx, k2, s),
            new Set([...s, ...idx[k][dir]])
          );

  // Generate a recursive sort method based on an index of { key: { prev, next } }
  const indexSorter = idx => (a, b) =>
    idx[a].next.has(b) || idx[b].prev.has(a) ? -1 :
    idx[a].prev.has(b) || idx[b].next.has(a) ?  1 :
    0;

  const uniques = xs => Array.from(new Set(xs));

  // App:
  const arrays = [
    ['X', 'D', 'H', 'B'],
    ['X', 'D', 'K', 'Z', 'H', 'B', 'A'],
    ['X', 'M', 'D', 'H', 'B'],
    ['X', 'H', 'T'],
    ['X', 'D', 'H', 'B']
  ];

  const sortIndex = expensiveSortIndex(arrays);
  const sorter = indexSorter(sortIndex);

  console.log(JSON.stringify(
    uniques(arrays.flat()).sort(sorter)
  ))
}())
Recommendations
I suppose the elegant solution to the problem might be able to skip all the merging of Sets by using a linked list / tree-like structure and injecting elements at the right indexes by traversing until an element of its prev/next is found.
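A rough sketch of that insertion idea (my interpretation of the suggestion, not the answerer's code; mergeByInsertion is a made-up name): keep a single output array and, for each new element, walk left through its own rule until an already-placed element is found, then splice it in right after that element.
const mergeByInsertion = (arrays) => {
  const out = [];
  arrays.forEach(rule => {
    rule.forEach((el, i) => {
      if (out.includes(el)) return; // first appearance wins
      // walk left through this rule until we hit something already placed
      let j = i - 1;
      while (j >= 0 && !out.includes(rule[j])) j--;
      const at = j >= 0 ? out.indexOf(rule[j]) + 1 : 0;
      out.splice(at, 0, el);
    });
  });
  return out;
};
console.log(mergeByInsertion([
  ['X', 'D', 'H', 'B'],
  ['X', 'D', 'K', 'Z', 'H', 'B', 'A'],
  ['X', 'M', 'D', 'H', 'B'],
  ['X', 'H', 'T'],
  ['X', 'D', 'H', 'B']
])); // ["X", "M", "D", "K", "Z", "H", "T", "B", "A"] for this input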
I would just flatten the arrays, map them as keys to an object (thus removing the doubles), and then sort the final result
const arrays = [
['A', 'B', 'C', 'D'],
['A', 'B', 'B-bis', 'B-ter', 'C', 'D', 'D-bis'],
['A', 'A-bis', 'B', 'C', 'D'],
['A', 'C', 'E'],
['A', 'B', 'C', 'D']
];
const final = Object.keys( arrays.flat().reduce( (aggregate, entry) => {
  aggregate[entry] = '';
  return aggregate;
}, {} ) ).sort( (x1, x2) => x1.localeCompare(x2) );
console.log( final );
In your code, after the merge you need to remove the duplicates, so you get an array of unique values.
Then use Array#sort to sort the array.
I hope this solves the issue.
const arrays = [
['A', 'B', 'C', 'D'],
['A', 'B', 'B-bis', 'B-ter', 'C', 'D', 'D-bis'],
['A', 'A-bis', 'B', 'C', 'D'],
['A', 'C', 'E'],
['A', 'B', 'C', 'D']
]
const merged = [].concat.apply([], arrays);
const unique = Array.from(new Set(merged))
const sorted = unique.sort()
console.log("sorted Array", sorted)
// Single Line
const result = [...new Set([].concat(...arrays))].sort();
console.log("sorted Array single line", result)
Use a BST for this: add all elements to the BST and then traverse it in order.
function BST() {
  this.key = null;
  this.value = null;
  this.left = null;
  this.right = null;

  this.add = function(key) {
    const val = key;
    // someOrderFunction is a placeholder left by the author for whatever
    // key-ordering transformation you want to apply
    key = someOrderFunction(key.replace(/\s/, ''));
    if (this.key == null) {
      this.key = key;
      this.value = val;
    } else if (key < this.key) {
      if (this.left) {
        this.left.add(val);
      } else {
        this.left = new BST();
        this.left.key = key;
        this.left.value = val;
      }
    } else if (key > this.key) {
      if (this.right) {
        this.right = new BST();
        this.right.key = key;
        this.right.value = val;
      }
    }
  }

  this.inOrder = function() {
    const leftNodeOrder = this.left ? this.left.inOrder() : [],
          rightNodeOrder = this.right ? this.right.inOrder() : [];
    return leftNodeOrder.concat(this.value).concat(rightNodeOrder);
  }
}

// mergeArrays uses a BST to insert all elements of all arrays
// and then fetches them back in sorted order
function mergeArrays(arrays) {
  const bst = new BST();
  arrays.forEach(array => array.forEach(e => bst.add(e)));
  return bst.inOrder();
}
My solution doesn't focus on efficiency at all, so I wouldn't try this on large arrays. But it works fine for me.
The idea is to walk through all elements multiple times and only insert an element into the sorted array in one of three cases:
The current element is first in its array, and one of its successors is first in the sorted array.
The current element is last in its array, and one of its predecessors is last in the sorted array.
The preceding element is in the sorted array and one of the current element's successors directly succeeds this preceding element in the sorted array.
For the current problem, as stated above, the order between T and B, A isn't uniquely determined. To handle this I use a flag force which takes any legal option when no new inserts can be made during an iteration.
The following rule from the problem is not implemented in my solution: "If an item appears in multiple arrays at different positions, the first appearance is the right one (skip the others)." There is no hierarchy between the arrays. It should, however, be easy to add the desired check and continue when it isn't satisfied.
let merge = (arrays) => {
  let sorted = [...arrays[0]];
  const unused_rules = arrays.slice(1);
  let not_inserted = unused_rules.flat().filter((v) => !sorted.includes(v));
  let last_length = -1;
  let force = false;
  // avoids lint warning
  const sortedIndex = (sorted) => (v) => sorted.indexOf(v);
  // loop until all elements are inserted, or until not even force works
  while (not_inserted.length !== 0 && !force) {
    // if the last iteration didn't add elements, our arrays lack complete
    // information and we must add something using what little we know
    force = not_inserted.length === last_length;
    last_length = not_inserted.length;
    for (let j = 0; j < unused_rules.length; j += 1) {
      const array = unused_rules[j];
      for (let i = 0; i < array.length; i += 1) {
        // check if element is already inserted
        if (sorted.indexOf(array[i]) === -1) {
          if (i === 0) {
            // if element is first in its array, check if it can be prepended to the sorted array
            const index = array.indexOf(sorted[0]);
            if (index !== -1 || force) {
              const insert = array.slice(0, force ? 1 : index);
              sorted = [...insert, ...sorted];
              not_inserted = not_inserted.filter((v) => !insert.includes(v));
              force = false;
            }
          } else if (i === array.length - 1) {
            // if element is last in its array, check if it can be appended to the sorted array
            const index = array.indexOf(sorted[sorted.length - 1]);
            if (index !== -1 || force) {
              const insert = array.slice(force ? array.length - 1 : index + 1);
              sorted = [...sorted, ...insert];
              not_inserted = not_inserted.filter((v) => !insert.includes(v));
              force = false;
            }
          } else {
            const indices = array.map(sortedIndex(sorted)); // map all elements to their index in sorted
            const predecessorIndexSorted = indices[i - 1]; // index in the sorted array of the element preceding the current element
            let successorIndexArray;
            if (force) {
              successorIndexArray = i + 1;
            } else {
              // index in the current array of the element succeeding the current element's predecessor in the sorted array
              successorIndexArray = indices.indexOf(predecessorIndexSorted + 1);
            }
            if (predecessorIndexSorted !== -1 && successorIndexArray !== -1) {
              // insert all elements between predecessor and successor
              const insert = array.slice(i, successorIndexArray);
              sorted.splice(i, 0, ...insert);
              not_inserted = not_inserted.filter((v) => !insert.includes(v));
              force = false;
            }
          }
        }
      }
    }
  }
  return sorted;
};
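A possible invocation, using the arrays from the question (my usage example, not part of the original answer):
const arrays = [
  ['X', 'D', 'H', 'B'],
  ['X', 'D', 'K', 'Z', 'H', 'B', 'A'],
  ['X', 'M', 'D', 'H', 'B'],
  ['X', 'H', 'T'],
  ['X', 'D', 'H', 'B']
];
console.log(merge(arrays));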
In fact, the rule "If an item appears in multiple arrays at different positions, the first appearance is the right one (skip the others)" is a bit vague. For example, using the arrays below, is it okay to end up with arrays[3] as the sorted array, since it doesn't violate the first appearance of any element, or should arrays[2] take precedence?
const arrays = [['a', 'b', 'd'],
                ['a', 'c', 'd'],
                ['a', 'b', 'c', 'd'],
                ['a', 'c', 'b', 'd']]

Convert multiple arrays to single JSON

I need help converting several arrays:
x = ['a', 'b', 'c']
y = ['d', 'e', 'f']
z = ['d', 'g', 'h']
Into a single JSON:
{
  a: { b: { c: 'done' } },
  d: { e: { f: 'done' },
       g: { h: 'done' } }
}
Is this possible using recursion? I can't seem to get it working properly so I was wondering if there is already an easy way to do this in JS.
var struct = {};
var paths = [
  ['a', 'b', 'c'],
  ['d', 'e', 'f'],
  ['d', 'g', 'h']
];
paths.forEach(function (path) {
  var ref;
  ref = struct;
  path.forEach(function (elem, index) {
    if (!ref[elem]) {
      ref[elem] = index === path.length - 1 ? "done" : {};
    }
    ref = ref[elem];
  });
});
console.log(JSON.stringify(struct, null, "\t"));
Output:
{
  "a": {
    "b": {
      "c": "done"
    }
  },
  "d": {
    "e": {
      "f": "done"
    },
    "g": {
      "h": "done"
    }
  }
}
Note: this script will fail if your input is like:
var paths = [
  ['a', 'b', 'c'],
  ['a', 'b', 'c', 'd']
];
It decided c should be "done", but then there is another level. Maybe this won't ever happen; if it does, decide what you want the result to be.
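To answer the "is this possible using recursion?" part: yes. A minimal recursive sketch (my addition, using the same 'done' leaf convention; setPath is a made-up name):
// Recursively sets one nested path on an object, marking the leaf as 'done'.
const setPath = (obj, [head, ...rest]) => {
  if (rest.length === 0) {
    obj[head] = 'done';
  } else {
    obj[head] = typeof obj[head] === 'object' ? obj[head] : {};
    setPath(obj[head], rest);
  }
  return obj;
};
const result = [['a', 'b', 'c'], ['d', 'e', 'f'], ['d', 'g', 'h']]
  .reduce((acc, path) => setPath(acc, path), {});
console.log(JSON.stringify(result, null, 2));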
A version with Array#forEach() and Array#reduce()
var x = ['a', 'b', 'c'],
    y = ['d', 'e', 'f'],
    z = ['d', 'g', 'h'],
    object = function (array) {
      var o = {};
      array.forEach(function (a) {
        var last = a.pop();
        a.reduce(function (r, b) {
          r[b] = r[b] || {};
          return r[b];
        }, o)[last] = 'done';
      });
      return o;
    }([x, y, z]);
document.write('<pre>' + JSON.stringify(object, 0, 4) + '</pre>');
