Javascript Complex Grouping - javascript

I need help figuring out how to loop through the Source object to get the output strings below. I currently have some nested for loops, but I either miss one of the combinations or get too many. The number of categories and filters will vary.
Source Object
var Source = {
    Cat1: {Filter1: 'f1', Filter2: 'f2'},
    Cat2: {Filter3: 'f3', Filter4: 'f4'},
    Cat3: {Filter5: 'f5'}
};
Output Strings
Filter1 Filter3 Filter5,
Filter1 Filter4 Filter5,
Filter2 Filter3 Filter5,
Filter2 Filter4 Filter5

Try this; it will return an array of arrays:
function cartesianProduct(paramArray) {
    function addTo(curr, args) {
        var i, copy,
            rest = args.slice(1),
            last = !rest.length,
            result = [];
        for (i = 0; i < args[0].length; i++) {
            copy = curr.slice();
            copy.push(args[0][i]);
            if (last) {
                result.push(copy);
            } else {
                result = result.concat(addTo(copy, rest));
            }
        }
        return result;
    }
    return addTo([], Array.prototype.slice.call(arguments));
}
var Source = {
    Cat1: {Filter1: 'f1', Filter2: 'f2'},
    Cat2: {Filter3: 'f3', Filter4: 'f4'},
    Cat3: {Filter5: 'f5'}
};
// var cats = [];
var filters = [];
for (var c in Source) {
    filters.push([]);
    for (var f in Source[c])
        filters[filters.length - 1].push(f);
}
console.log(cartesianProduct.apply(this || window, filters));
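For reference, here is a more compact sketch of the same idea using reduce (an alternative, not the code above); it builds the cartesian product of the filter-name arrays directly:

var combos = filters.reduce(function (acc, group) {
    var next = [];
    acc.forEach(function (combo) {
        group.forEach(function (name) {
            next.push(combo.concat(name));   // extend each partial combination with a filter name
        });
    });
    return next;
}, [[]]);
// combos.map(function (c) { return c.join(' '); }) yields the output strings above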
HTH

Related

Remove data from an array comparing it to another array

I am trying to compare the items in the "item" array and the copyofOpList array, and remove the matching occurrences from copyofOpList.
This is my attempt:
var _deleteUsedElement1 = function(item) {
    for (var i = 0; i < item.length-1; i++) {
        for (var j = 0; j < $scope.copyofOpList.length-1; j++) {
            if (item[i].operationCode == $scope.copyofOpList[j].code) {
                $scope.copyofOpList.splice(j, 1);
            }
        }
    }
};
$scope.compareArrays = function() {
    // ... get data from the web service ...
    _deleteUsedElement1(item);
}
The copyofOpList array has 14 elements, and the item array has 2 elements, but my code deletes only one occurrence (the first). How can I correct my code so that it removes every occurrence in the copyofOpList array that matches an element of the item array?
Thanks for the help.
I'd try to avoid looping inside a loop - that's neither a very elegant nor a very efficient way to get the result you want.
Here's something more elegant and most likely more efficient:
var item = [1, 2], copyofOpList = [1, 2, 3, 4, 5, 6, 7];
var _deleteUsedElement1 = function(item, copyofOpList) {
    return copyofOpList.filter(function(listItem) {
        return item.indexOf(listItem) === -1;
    });
};
copyofOpList = _deleteUsedElement1(item, copyofOpList);
console.log(copyofOpList);
// prints [3,4,5,6,7]
And since I just noticed that you're comparing object properties, here's a version that filters on matching object properties:
var item = [{opCode: 1}, {opCode: 2}],
    copyofOpList = [{opCode: 1}, {opCode: 2}, {opCode: 3}, {opCode: 4}, {opCode: 5}, {opCode: 6}, {opCode: 7}];
var _deleteUsedElement1 = function(item, copyofOpList) {
    var iOpCodes = item.map(function (i) { return i.opCode; });
    return copyofOpList.filter(function(listItem) {
        return iOpCodes.indexOf(listItem.opCode) === -1;
    });
};
copyofOpList = _deleteUsedElement1(item, copyofOpList);
console.log(copyofOpList);
// prints [{opCode:3},{opCode:4},{opCode:5},{opCode:6},{opCode:7}]
Another benefit of doing it in this manner is that you avoid modifying your arrays while you're still operating on them, a positive effect that both JonSG and Furhan S. mentioned in their answers.
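If the lists are large, the repeated indexOf lookups are O(n) each; a Set-based variant (a sketch, assuming an ES2015 environment and the property names from the question) keeps the same filtering behavior with constant-time lookups:

var _deleteUsedElement1 = function(item, copyofOpList) {
    // build a Set of the operation codes that should be removed
    var usedCodes = new Set(item.map(function (i) { return i.operationCode; }));
    return copyofOpList.filter(function (listItem) {
        return !usedCodes.has(listItem.code);
    });
};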
Splicing will change your array. Use a temporary buffer array for new values like this:
var _deleteUsedElement1 = function(item) {
    var _temp = [];
    for (var i = 0; i < $scope.copyofOpList.length; i++) {
        var used = false;
        for (var j = 0; j < item.length; j++) {
            if ($scope.copyofOpList[i].code == item[j].operationCode) {
                used = true;
                break;
            }
        }
        if (!used) {
            _temp.push($scope.copyofOpList[i]);
        }
    }
    $scope.copyofOpList = _temp;
};

Recursion: How can I remove

Here's my code:
var asset = ['1234_12', '1234_34', '1234_33', '4321_22', '4321_90'];
var largest = removeElements(asset);

function removeElements(asset) {
    var retVal = [];
    for (i = 0; i < asset.length; i++) {
        for (var j = 0; j < asset.length; j++) {
            if (asset[i].split('_')[0] == asset[j].split('_')[0]) {
                if (asset[i].split('_')[1].split('.')[0] > asset[j].split('_')[1].split('.')[0]) {
                    retVal = removeElements(asset, asset[j]);
                    for (var k = 0; k < retVal.length; k++) {
                        for (var l = 0; l < retVal.length; l++) {
                            if (retVal[k].split('_')[0] == retVal[l].split('_')[0]) {
                                removeElements(retVal);
                            } else {
                                return retVal;
                            }
                        }
                    }
                }
            }
        }
    }
    return retVal;
}
Here's the structure of array:
var asset = ['1234_12', '1234_34', '1234_33', '4321_22', '4321_90'];
What I want is to get largest in '1234' or '4321' series. For example, in this case, I need to grab '1234_34' and '4321_90'.
RangeError: Maximum call stack size exceeded
What am I doing wrong?
You are making this harder than it needs to be. You can just iterate over each item and store the matched values in an object:
var asset = ['1234_12', '1234_34', '1234_33', '4321_22', '4321_90'];
var intermediate = {};
asset.forEach(function(v) {
    var parts = v.split('_');
    var key = parts[0];
    var val = parts[1];
    if (!intermediate[key] || intermediate[key] < val) {
        intermediate[key] = val;
    }
});
This will produce an object like:
{"1234": "34", "4321": "90"}
which can then be turned into the expected array:
var output = Object.keys(intermediate).map(function(key) {
    return key + '_' + intermediate[key];
});
console.log(output); // ["1234_34", "4321_90"]
Take a look at .forEach, .map and Object.keys
Here's an example of something that will grab those values (see the jsbin):
var asset = ['1234_12', '1234_34', '1234_33', '4321_22', '4321_90'];
var ids = _.values(_.mapValues(asset.reduce(function(agg, curr) {
    var parts = curr.split('_');
    agg[parts[0]] = agg[parts[0]] || [];
    agg[parts[0]].push(parts[1]);
    return agg;
}, {}), function(value, key) {
    return [key, Math.max.apply(Math, value)].join('_');
}));
console.log(ids); // => ["1234_34", "4321_90"]
It uses lodash for convenience, but the principles are the same without it.
First you split each string into a key-value pair of prefix and suffix (so 1234_12, 1234_34, etc. become something like { 1234: ['12', '34'] }). Then you just find the max value in each array and join it back with its key.
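Without lodash, the same idea (a rough sketch, using only built-in Array and Object methods) looks like this:

var groups = asset.reduce(function (agg, curr) {
    var parts = curr.split('_');
    agg[parts[0]] = agg[parts[0]] || [];
    agg[parts[0]].push(Number(parts[1]));   // collect numeric suffixes per prefix
    return agg;
}, {});
var ids = Object.keys(groups).map(function (key) {
    return key + '_' + Math.max.apply(Math, groups[key]);   // keep the largest suffix
});
console.log(ids); // => ["1234_34", "4321_90"]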

Best way to group elements in an array with least complexity

I have a JSON array which looks like this:
var map_results = [{"Type":"Flat","Price":100.9},
                   {"Type":"Room","Price":23.5},
                   {"Type":"Flat","Price":67.5},
                   {"Type":"Flat","Price":100.9},
                   {"Type":"Plot","Price":89.8}];
This array contains about 100,000 records. I want the output to be grouped by "Type" and "Price". It should look like this:
var expected_output = [{"Type":"Flat", "Data":[{"Price":100.9, "Total":2},
                                               {"Price":67.5, "Total":1}]},
                       {"Type":"Room", "Data":[{"Price":23.5, "Total":1}]},
                       {"Type":"Plot", "Data":[{"Price":89.8, "Total":1}]}];
This has to be done in pure JavaScript and I cannot use libraries like underscore.js. I tried solving the problem, but my solution had three nested for loops, which pushed the complexity to roughly n^4. What would be a better solution for this problem?
The function I have looks like this:
var reduce = function (map_results) {
    var results = [];
    for (var i in map_results) {
        var type_found = 0;
        for (var result in results) {
            if (map_results[i]["Type"] == results[result]["Type"]) {
                type_found = 1;
                var price_found = 0;
                for (var data in results[result]["Data"]) {
                    if (map_results[i]["Price"] == results[result]["Data"][data]["Price"]) {
                        price_found = 1;
                        results[result]["Data"][data]["Total"] += 1;
                    }
                }
                if (price_found == 0) {
                    results[result]["Data"].push({"Price": map_results[i]["Price"], "Total": 1});
                }
            }
        }
        if (type_found == 0) {
            results.push({"Type": map_results[i]["Type"], "Data": [{"Price": map_results[i]["Price"], "Total": 1}]});
        }
    }
    return results;
};
I have a short function that handles the first part of the requested functionality: it maps map_results to the desired format:
var map_results = [{"Type":"Flat","Price":100.9},
                   {"Type":"Room","Price":23.5},
                   {"Type":"Flat","Price":67.5},
                   {"Type":"Flat","Price":100.9},
                   {"Type":"Plot","Price":89.8}];

var expected_output = map_results.reduce(function(obj, current) {
    if (!obj[current.Type]) {
        obj[current.Type] = {'Type': current.Type, 'Data': []};
    }
    obj[current.Type].Data.push({'Price': current.Price, 'Total': 1});
    return obj;
}, {});
Then this piece of code is required to calculate the totals, I'm afraid:
for (var type in expected_output) {
    var d = {};
    for (var item in expected_output[type].Data) {
        d[expected_output[type].Data[item].Price] = (d[expected_output[type].Data[item].Price] || 0) + 1;
    }
    expected_output[type].Data = [];
    for (var i in d) {
        expected_output[type].Data.push({
            'Price': i,
            'Total': d[i]
        });
    }
}
Output:
{
    "Flat": {
        "Type": "Flat",
        "Data": [{"Price": "100.9", "Total": 2},
                 {"Price": "67.5", "Total": 1}]
    },
    "Room": {
        "Type": "Room",
        "Data": [{"Price": "23.5", "Total": 1}]
    },
    "Plot": {
        "Type": "Plot",
        "Data": [{"Price": "89.8", "Total": 1}]
    }
}
As the Types and the Prices are unique after grouping, I think a structure like {"Flat": {"100.9": 2, "67.5": 1}, "Room": {"23.5": 1}} would be easier to handle. So you could do the grouping the following way:
var output = {};
map_results.map(function(el, i) {
    output[el["Type"]] = output[el["Type"]] || {};
    output[el["Type"]][el["Price"]] = (output[el["Type"]][el["Price"]] + 1) || 1;
});
If you cannot handle this structure, you could do another mapping to your structure.
As you are iterating over the array only once, this should have a complexity of n.
Look here for a working fiddle.
EDIT: So remap everything to your structure. The cost of the remapping is far less than the first mapping, because the grouping is already done.
var expected_output = [];
for (var type in output) {
    var prices = [];
    for (var price in output[type]) {
        prices.push({"Price": price, "Total": output[type][price]});
    }
    expected_output.push({"Type": type, "Data": prices});
}
Below is yet another effort. Here's a FIDDLE
For performance testing, I also mocked up a JSPerf test with 163,840 elements. On Chrome (OS X), the original solution is 90% slower than this one.
A few notes:
Feel free to optimize for your case (e.g. take out the hasOwnProperty check on object cloning).
Also, if you need the latest Total as the first element, use unshift instead of push to add the object to the beginning of the array.
function groupBy(arr, key, key2) {
    var retArr = [];
    arr.reduce(function(previousValue, currentValue, index, array) {
        if (currentValue.hasOwnProperty(key)) {
            var kVal = currentValue[key];
            if (!previousValue.hasOwnProperty(kVal)) {
                previousValue[kVal] = {};
                retArr.push(previousValue[kVal]);
                previousValue[kVal][key] = kVal;
                previousValue[kVal]["Data"] = [];
            }
            var prevNode = previousValue[kVal];
            if (currentValue.hasOwnProperty(key2)) {
                var obj = {};
                for (var k in currentValue) {
                    if (currentValue.hasOwnProperty(k) && k != key)
                        obj[k] = currentValue[k];
                }
                obj["Total"] = prevNode["Data"].length + 1;
                prevNode["Data"].push(obj);
            }
        }
        return previousValue;
    }, {});
    return retArr;
}
var map_results = [{"Type":"Flat","Price":100.9},
                   {"Type":"Room","Price":23.5},
                   {"Type":"Flat","Price":67.5},
                   {"Type":"Flat","Price":100.9},
                   {"Type":"Plot","Price":89.8}];
var expected_output = groupBy(map_results, "Type", "Price");
console.dir(expected_output);
I tried something like this:
var reduce_func = function (previous, current) {
    if (previous.length == 0) {
        previous.push({Type: current.Type, Data: [{Price: current.Price, Total: 1}]});
        return previous;
    }
    var type_found = 0;
    for (var one in previous) {
        if (current.Type == previous[one].Type) {
            type_found = 1;
            var price_found = 0;
            for (var data in previous[one].Data) {
                if (current.Price == previous[one].Data[data].Price) {
                    price_found = 1;
                    previous[one].Data[data].Total += 1;
                }
            }
            if (price_found == 0) {
                previous[one].Data.push({Price: current.Price, Total: 1});
            }
        }
    }
    if (type_found == 0) {
        previous.push({Type: current.Type, Data: [{Price: current.Price, Total: 1}]});
    }
    return previous;
};

map_results.reduce(reduce_func, []);
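For comparison, here is a sketch (not from the answers above) that builds the requested output array in a single pass by keeping lookup indexes keyed on Type and on Type/Price, so no inner scans over the accumulated results are needed:

var typeIndex = {};   // Type -> entry in the results array
var priceIndex = {};  // "Type|Price" -> Data entry
var results = map_results.reduce(function (acc, row) {
    var entry = typeIndex[row.Type];
    if (!entry) {
        entry = typeIndex[row.Type] = { Type: row.Type, Data: [] };
        acc.push(entry);
    }
    var priceKey = row.Type + '|' + row.Price;
    var dataEntry = priceIndex[priceKey];
    if (!dataEntry) {
        dataEntry = priceIndex[priceKey] = { Price: row.Price, Total: 0 };
        entry.Data.push(dataEntry);
    }
    dataEntry.Total += 1;
    return acc;
}, []);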

How do I create an Array into another Array?

I have the following JavaScript Array:
var jsonArray = { 'homes': [
    {
        "home_id": "203",
        "price": "925",
        "sqft": "1100",
        "num_of_beds": "2",
        "num_of_baths": "2.0"
    },
    {
        "home_id": "59",
        "price": "1425",
        "sqft": "1900",
        "num_of_beds": "4",
        "num_of_baths": "2.5"
    },
    // ... (more homes) ...
]}
I want to convert this to the following type of Array (pseudo code):
var newArray = new Array();
newArray.push(home_id's);
How can I do that?
Notice how the newArray only has home_ids from the big jsonArray array.
Just make a new array and copy the old values in.
var ids = [];
for (var i = 0; i < jsonArray.homes.length; i++) {
    ids[i] = jsonArray.homes[i].home_id;
}
Again, jsonArray is not an array but an object, but jsonArray.homes is:
var arr = [];
for (var i = 0, len = jsonArray.homes.length; i < len; i++) {
    arr.push(jsonArray.homes[i].home_id);
}
Here's one iterative way:
function getPropertyValues(array, id) {
    var result = [];
    for (var hash in array) {
        result.push(array[hash][id]);
    }
    return result;
}
var home_ids = getPropertyValues(jsonArray.homes, "home_id");
Or if you want to do it real quick and dirty (and you are only targeting modern Javascript capable engines):
var home_ids = jsonArray.homes.map( function(record) { return record.home_id } );

fastest way to detect if duplicate entry exists in javascript array?

var arr = ['test0','test2','test0'];
As above, there are two identical entries with the value "test0". How do I check for this most efficiently?
If you sort the array, the duplicates are next to each other so that they are easy to find:
arr.sort();
var last = arr[0];
for (var i = 1; i < arr.length; i++) {
    if (arr[i] == last) alert('Duplicate: ' + last);
    last = arr[i];
}
This will do the job on any array and is probably about as optimized as possible for handling the general case (finding a duplicate in any possible array). For more specific cases (e.g. arrays containing only strings) you could do better than this.
function hasDuplicate(arr) {
    var i = arr.length, j, val;
    while (i--) {
        val = arr[i];
        j = i;
        while (j--) {
            if (arr[j] === val) {
                return true;
            }
        }
    }
    return false;
}
There are lots of answers here but not all of them "feel" nice... So I'll throw my hat in.
If you are using lodash:
function containsDuplicates(array) {
    return _.uniq(array).length !== array.length;
}
If you can use ES6 Sets, it simply becomes:
function containsDuplicates(array) {
    return array.length !== new Set(array).size;
}
With vanilla javascript:
function containsDuplicates(array) {
    return array
        .sort()
        .some(function (item, i, items) {
            return item === items[i + 1];
        });
}
However, sometimes you may want to check if the items are duplicated on a certain field.
This is how I'd handle that:
containsDuplicates([{country: 'AU'}, {country: 'UK'}, {country: 'AU'}], 'country')
function containsDuplicates(array, attribute) {
    return array
        .map(function (item) { return item[attribute]; })
        .sort()
        .some(function (item, i, items) {
            return item === items[i + 1];
        });
}
The loop stops when the first duplicate is found:
function has_duplicates(arr) {
    var x = {}, len = arr.length;
    for (var i = 0; i < len; i++) {
        if (x[arr[i]]) {
            return true;
        }
        x[arr[i]] = true;
    }
    return false;
}
Edit (fix 'toString' issue):
function has_duplicates(arr) {
    var x = {}, len = arr.length;
    for (var i = 0; i < len; i++) {
        if (x[arr[i]] === true) {
            return true;
        }
        x[arr[i]] = true;
    }
    return false;
}
This corrects cases like has_duplicates(['toString']), where the key collides with an inherited Object.prototype property.
var index = myArray.indexOf(strElement);
if (index < 0) {
    myArray.push(strElement);
    console.log("Added Into Array " + strElement);
} else {
    console.log("Already Exists at " + index);
}
You can convert the array to a Set instance and check whether the Set's size matches the array's length.
const hasDuplicates = (array) => {
    const uniqueItems = new Set(array);
    return array.length !== uniqueItems.size;
};

console.log(`Has duplicates: ${hasDuplicates(['test0', 'test2', 'test0'])}`);
console.log(`Has duplicates: ${hasDuplicates(['test0', 'test2', 'test3'])}`);
Sorting is O(n log n) and not O(n). Building a hash map is O(n). It costs more memory than an in-place sort but you asked for the "fastest." (I'm positive this can be optimized but it is optimal up to a constant factor.)
function hasDuplicate(arr) {
    var hash = {};
    var hasDuplicate = false;
    arr.forEach(function(val) {
        if (hash[val]) {
            hasDuplicate = true;
            return; // note: this only skips the current iteration; forEach cannot be broken out of early
        }
        hash[val] = true;
    });
    return hasDuplicate;
}
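If the early exit matters, a loop that returns as soon as it sees a repeat avoids scanning the rest of the array; here is a sketch using a Set (assuming ES2015 is available), which also side-steps the object-key coercion issue mentioned above:

function hasDuplicateEarlyExit(arr) {
    var seen = new Set();
    for (var i = 0; i < arr.length; i++) {
        if (seen.has(arr[i])) {
            return true;   // stop at the first repeat
        }
        seen.add(arr[i]);
    }
    return false;
}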
It depends on the input array size. I've done some performance tests with Node.js performance hooks and found that for really small arrays (1,000 to 10,000 entries) the Set solution might be faster. But if your array is bigger (say 100,000 elements), the plain Object (i.e. hash) solution becomes faster. Here's the code so you can try it out for yourself:
const { performance } = require('perf_hooks');

function objectSolution(nums) {
    let testObj = {};
    for (var i = 0; i < nums.length; i++) {
        let aNum = nums[i];
        if (testObj[aNum]) {
            return true;
        } else {
            testObj[aNum] = true;
        }
    }
    return false;
}

function setSolution(nums) {
    let testSet = new Set(nums);
    return testSet.size !== nums.length;
}

function sortSomeSolution(nums) {
    return nums
        .sort()
        .some(function (item, i, items) {
            return item === items[i + 1];
        });
}

function runTest(testFunction, testArray) {
    console.log('  Running test:', testFunction.name);
    let start = performance.now();
    let result = testFunction(testArray);
    let end = performance.now();
    console.log('  Duration:', end - start, 'ms');
}

let arr = [];
let setSize = 100000;
for (var i = 0; i < setSize; i++) {
    arr.push(i);
}

console.log('Set size:', setSize);
runTest(objectSolution, arr);
runTest(setSolution, arr);
runTest(sortSomeSolution, arr);
On my Lenovo IdeaPad with an i3-8130U, Node.js v16.6.2 bears this out for arrays of both 1,000 and 100,000 entries.
Assuming all you want is to count how many times 'test0' appears in the array, an easy way to do that is to use the join method to turn the array into a string, and then use the match method.
var arr = ['test0', 'test2', 'test0'];
var str = arr.join();
console.log(str); // "test0,test2,test0"

var duplicates = str.match(/test0/g);
var duplicateNumber = duplicates.length;
console.log(duplicateNumber); // 2
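For a count of any value (not just a literal baked into a regex), a small sketch with filter avoids building the intermediate string and also works for values that contain regex metacharacters:

function countOccurrences(arr, value) {
    return arr.filter(function (item) {
        return item === value;   // strict comparison, no string joining or regex
    }).length;
}

console.log(countOccurrences(['test0', 'test2', 'test0'], 'test0')); // 2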
