Here is my code:
var data = [[40, 20, 60], [20, 30, 10], [50, 75, 40]];
var averageData = [];
data.forEach(function(entries) {
  entries.reduce(function(a, b) {
    return a + b[1];
  }, 0);
  console.log(entries);
});
I would like to be able to add the numbers from each array together.
But I'm not sure how I can get each array of numbers added together from within the forEach loop.
From this data I would like to output [120, 60, 165] instead.
It is important that the data starts out in a nested array; the aim is to get it out of the nested array into a single flat array with the above output.
Hope someone can offer some advice!
Thanks
Use Array#map instead.
Note that b[1] is undefined (each b here is a number, not an array), and entries.reduce returns the reduced value; either return it or keep it in a variable.
var data = [
  [40, 20, 60],
  [20, 30, 10],
  [50, 75, 40]
];
var averageData = data.map(function(entries) {
  return entries.reduce(function(a, b) {
    return a + b;
  }, 0);
});
console.log(averageData);
Edit:
As suggested in the comments by @Tushar, an ES6 version:
var data = [
  [40, 20, 60],
  [20, 30, 10],
  [50, 75, 40]
];
console.log(data.map(arr => arr.reduce((a, b) => a + b, 0)));
reduce() will return the sum you want at the end. See below.
var data = [[40, 20, 60], [20, 30, 10], [50, 75, 40]];
var averageData = [];
data.forEach(function(entries) {
  // keep the reduced value in a variable
  var sum = entries.reduce(function(a, b) {
    return a + b;
  }, 0);
  console.log(sum);
  averageData.push(sum); // collects [120, 60, 165]
});
If your values are safe (meaning you know where they come from), you could use eval to quickly sum the values.
var data = [[40, 20, 60], [20, 30, 10], [50, 75, 40]];
var totals = [];
data.forEach(function(a, i) {
  totals.push(eval(a.join('+')));
});
console.log(totals);
Not the most efficient way, but it works.
Related
How do I calculate the overall average of an array like this?
example array:
[[80, 90, 70], [70,80,60],[90,100,80]]
what I am trying right now:
for (let i = 0; i < grades.length; i++) {
  for (let y = 0; y < grades[i].length; y++) {
    lastTotalScore += grades[i][y];
    lastAverageScore = lastTotalScore / grades[i].length;
    overallTotalScore += lastAverageScore;
    overallAverageScore = overallTotalScore / grades.length;
  }
}
Thanks
Flatten the array, then reduce over the elements adding them up, and divide by the length of the flattened array.
const arr = [
  [80, 90, 70],
  [70, 80, 60],
  [90, 100, 80]
];

// Flatten the nested arrays
const flat = arr.flat();

// `reduce` over the numbers, and then divide
// by the number of elements in the array
const avg = flat.reduce((acc, c) => {
  return acc + c;
}, 0) / flat.length;

console.log(avg);
Calculate the total of all the numbers, then divide it by the count of numbers.
Working Sample
const grades = [[80, 90, 70], [70, 80, 60], [90, 100, 80]];
let lastTotalScore = 0;
let length = 0;
for (let i = 0; i < grades.length; i++) {
  length += grades[i].length;
  for (let y = 0; y < grades[i].length; y++) {
    lastTotalScore += grades[i][y];
  }
}
console.log(`Average = ${lastTotalScore / length}`);
OR
Convert the two-dimensional array to a linear structure using Array.flat, calculate its sum by looping with Array.reduce, and divide by its length.
Working Sample
const grades = [[80, 90, 70], [70, 80, 60], [90, 100, 80]];
const flatGrade = grades.flat();
const sum = flatGrade.reduce((acc, curr) => acc + curr, 0);
console.log(`Average = ${sum / flatGrade.length}`);
This would also work, using flat and reduce.
const input = [
  [80, 90, 70],
  [70, 80, 60],
  [90, 100, 80],
];
const { sum, count } = input.flat().reduce(
  (prev, curr) => {
    prev.sum += curr;
    prev.count += 1;
    return prev;
  },
  { sum: 0, count: 0 }
);
console.log(sum / count);
As some of the others suggested, flattening the array does the trick nicely.
The only additional thing I would consider is to wrap the code into a function, which can then be reused, to keep things DRY.
const someArray = [[80, 90, 70], [70, 80, 60], [90, 100, 80]];

function getAverage(arr) {
  const oneArr = arr.flat();
  // pass 0 as the initial value so an empty array doesn't throw
  const avg = oneArr.reduce((sum, value) => sum + value, 0) / oneArr.length;
  return avg;
}

console.log(`The average grade is: ${getAverage(someArray)}.`);
Here's one that uses only reduce. It averages the per-row averages, which equals the overall average here because every row has the same length.
const arr = [
  [80, 90, 70],
  [70, 80, 60],
  [90, 100, 80],
];
const overallAverage =
  arr.reduce((acc, curr) => acc + curr.reduce((a, c) => a + c) / curr.length, 0) / arr.length;
console.log(overallAverage);
In my program, the user fills in various fields and chooses a Pokémon, and it calculates its stats. I have the stat arrays under the Pokémon names, and a drop-down to choose which Pokémon's stats to calculate. I then use eval() to turn the string from the drop-down into the array's name in order to do the calculations. The full program can be found at this link:
https://studio.code.org/projects/applab/8N1yw5e0CcjTik0rsmp2mgW0TSoU7DSge8k8j5mlhw0
but a simplified version is:
var venusaur = [20, 10, 5, 10, 20];
var blastoise = [50, 50, 50, 10, 5];
var charizard = [100, 10, 5, 10, 100];

var mon = getText("dropdown");
findStats(eval(mon));
Before I added the eval, the code was much clunkier, and it would only get worse as I added more Pokémon. A simpler version of it is:
var venusaur = [20, 10, 5, 10, 20];
var blastoise = [50, 50, 50, 10, 5];
var charizard = [100, 10, 5, 10, 100];

if (getText("dropdown") == "venusaur") {
  findStats(venusaur);
} else if (getText("dropdown") == "blastoise") {
  findStats(blastoise);
} else if (getText("dropdown") == "charizard") {
  findStats(charizard);
}
I was just wondering whether eval() is usable in this case, as I've heard almost exclusively bad things about it, and I figured that if there's a better way to accomplish this task, maybe I could learn it. Thanks in advance
Use an object and reference the object instead of using a bunch of global variables.
var pokes = {
  venusaur: [20, 10, 5, 10, 20],
  blastoise: [50, 50, 50, 10, 5],
  charizard: [100, 10, 5, 10, 100]
};

var mon = getText("dropdown");
console.log(pokes[mon]);
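From there, the lookup plugs straight into the original call (assuming findStats behaves as in the question); note that pokes[mon] is undefined when the drop-down value isn't a key, so a guard is worth adding:
if (pokes[mon]) {
  findStats(pokes[mon]); // same findStats as in the question
}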
I think you're trying to do something like this...
const stats = {
  venusaur: [20, 10, 5, 10, 20],
  blastoise: [50, 50, 50, 10, 5],
  charizard: [100, 10, 5, 10, 100],
};

const sel = document.querySelector('select');

const getStats = (e) => {
  const res = document.getElementById("result");
  res.innerHTML = stats[e.target.value] || 'not found';
};

sel.addEventListener('change', getStats);
<select>
  <option value="null">Select...</option>
  <option value="venusaur">Venusaur</option>
  <option value="blastoise">Blastoise</option>
  <option value="charizard">Charizard</option>
</select>
<div id="result"></div>
How do you determine the bucket to which a number belongs? For example, let's say we have the buckets 0 - 20, 21 - 50, 51 - 80, 81 - 100, or the equivalent grades 'Poor', 'Average', 'Good', 'Great'. Is there an efficient way using jQuery/lodash/d3/underscore to find out that 45 belongs to the '21 - 50' bucket, i.e. is 'Average'?
Edit: Is this the best way to do it, in terms of speed and minimal code?
Here's what I have, with a lot of help:
// Set up your data
var range = [[0, 20], [21, 50], [51, 80], [81, 100]];
var number = 45;

range.find(function(val) { return val[1] >= number });
// Returns [21, 50]

range.findIndex(function(val) { return val[1] >= number });
// Returns 1
This should work...
var range = [[0, 20], [21, 50], [51, 80], [81, 100]];
var number = 45;

var bucket = range.filter(function(a) {
  if (number >= a[0] && number <= a[1]) return a;
});
console.log(bucket[0]);
You can use an array of objects whose keys are the grades, 'Poor' through 'Great', and whose values are the corresponding number ranges, then filter it with Array.prototype.filter():
var range = [{
  Poor: [0, 20]
}, {
  Average: [21, 50]
}, {
  Good: [51, 80]
}, {
  Great: [81, 100]
}];
var number = 45;

var res = range.filter(function(el) {
  var key = el[Object.keys(el)[0]];
  // inclusive comparisons so boundary values such as 21 or 50 still match
  return number >= key[0] && number <= key[1];
});

console.log(Object.keys(res[0])[0]);
The D3 way using scales:
var scale = d3.scaleQuantize()
  .domain([0, 100])
  .range(["very bad", "bad", "average", "good", "very good"]);

console.log(scale(34));
console.log(scale(55));
console.log(scale(91));
<script src="https://d3js.org/d3.v4.min.js"></script>
The nice thing about D3 is that the scale automatically divides the domain based on the number of values in the range. For instance, the snippet above has 5 values ("very bad", "bad", "average", "good", "very good"), and so 34 is "bad". In the snippet below, using only 3 values, 34 is "average":
var scale = d3.scaleQuantize()
  .domain([0, 100])
  .range(["bad", "average", "good"]);

console.log(scale(34));
console.log(scale(55));
console.log(scale(91));
<script src="https://d3js.org/d3.v4.min.js"></script>
var ranges = [{
  name: 'Poor',
  range: [0, 20]
}, {
  name: 'Average',
  range: [21, 50]
}, {
  name: 'Good',
  range: [51, 80]
}, {
  name: 'Great',
  range: [81, 100]
}];

var number = 45;

var range = _.find(ranges, function (r) {
  return _.inRange(number, r.range[0], r.range[1] + 1);
}).name;

console.log(range); // "Average"
I've got a couple of arrays I want to merge horizontally:
data1 = [["ID"], [21], [26], [32]]
data2 = [["A", "B", "C"], [10, 10, 10], [95, 95, 95], [95, 95, 95]]
Both arrays always have the same number of "rows".
I'm looking for this result:
result = [["ID", "A" , "B", "C"],[21, 10, 10, 10]...]
Currently I have the following code:
for (var i = 0; i < data1.length; i++) {
  data3.push([data1[i], data2[i]]);
}
This gives me a strange result and I don't really understand why:
[[["A"], [10, 10, 10]]]
Any help? I always struggle with arrays.
That is not a weird result: on each pass you push a two-element array whose elements are themselves arrays ([data1[i], data2[i]]), so the rows stay nested instead of being merged, as the sketch below shows.
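Here is a minimal sketch of what one iteration of the original loop builds (using the i = 1 row from the question's data):
var data3 = [];
// data1[1] is [21] and data2[1] is [10, 10, 10]
data3.push([[21], [10, 10, 10]]);
console.log(data3); // [[[21], [10, 10, 10]]], a pair of arrays rather than one merged row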
To achieve what you need, prepend the ID onto the row and then push the row. Note that unshift returns the new length of the array, not the array itself, so the two steps can't be collapsed into one push:
data2[i].unshift(data1[i][0]);
data3.push(data2[i]);
Assumption: A, B, C are strings, like 'A'.
Suggestion: you should use a better data structure, but still, try the solution below. You kept changing the question and I had to update my answer accordingly. I tried the following and it works for me:
var data1 = [["ID"], [21], [26], [32]];
var data2 = [["A", "B", "C"],[10, 10, 10], [95, 95, 95], [95, 95, 95]];
var data3 = [];
for ( var i = 0; i < data1.length; i++ ) {
data2[i].unshift(data1[i][0]);
data3.push(data2[i] );
}
console.log(data3)
Gives me the following output:
[["ID", "A", "B", "C"], [21, 10, 10, 10], [26, 95, 95, 95], [32, 95, 95, 95]]
I have an array of arrays in JavaScript, set up within an object that I'm storing with local storage. It holds high scores for a Phonegap game in the format {'0': [[score, time], [score, time]]}, where 0 is the category. I'm able to read the scores using a[1][0]. I want to sort with the high scores first.
var c = {'0': [[100, 11], [150, 12]]};
c.sort(function() {
  x = a[0];
  y = b[0];
  return y - x;
});
However, b[0] always gives an undefined error.
I'm new to JavaScript, and this is my first major test as a learning experience. Though I've looked at a number of examples on Stack Overflow, I still can't figure this one out.
You need to declare the parameters to the comparator function.
c.sort(function(){
should be
c.sort(function(a, b){
and, as "am not i am" points out, you need to call sort on an array, so
var c = {'0': [[100, 11], [150, 12]]};
c[0].sort(function(a, b) {
  var x = a[0];
  var y = b[0];
  return y - x;
});
leaves c in the following state:
{
  "0": [
    [150, 12],
    [100, 11]
  ]
}
function largestOfFour(arr) {
  for (var x = 0; x < arr.length; x++) {
    // sort each inner array in descending order (sort works in place)
    arr[x] = arr[x].sort(function(a, b) {
      return b - a;
    });
  }
  return arr;
}

largestOfFour([[4, 5, 1, 3], [13, 27, 18, 26], [32, 35, 37, 39], [1000, 1001, 857, 1]]);
var myArray = [[11, 5, 6, 3, 10], [22, 55, 33, 66, 11, 0], [32, 35, 37, 39], [1000, 1001, 1002, 1003, 999]];

// eachRight iterates from the last row to the first
_.eachRight(myArray, function(value) {
  var descending = _.sortBy(value).reverse();
  console.log(descending);
});
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.5/lodash.js"></script>