Given a sequence of integers, return the sum of all the integers that have an even index, multiplied by the integer at the last index - javascript

This is my solution and it passes some of the tests, but not all of them. Can anyone help me and explain why? Thank you :)
function evenLast(numbers) {
  let sum = 0;
  let lastNum = numbers.pop();
  let arr = numbers.filter(el => el % 2 === 0);
  for (let i = 0; i < arr.length; i++) {
    sum += (arr[i] * lastNum);
  }
  return sum;
}

You need to check the index, not the value
let arr = numbers.filter((_, i) => i % 2 === 0);
And you could multiply the sum at the last step.
for (let i = 0; i < arr.length; i++) {
  sum += arr[i];
}
return sum * lastNum;
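Putting the two fixes together, a sketch of the corrected function could look like this (it reads the last value by index rather than with pop(), so the input isn't mutated and a last element sitting at an even index still counts toward the sum):
function evenLast(numbers) {
  const lastNum = numbers[numbers.length - 1];       // last value, read without mutating the array
  const arr = numbers.filter((_, i) => i % 2 === 0); // keep elements at even indexes
  let sum = 0;
  for (let i = 0; i < arr.length; i++) {
    sum += arr[i];
  }
  return sum * lastNum;
}
console.log(evenLast([2, 3, 4, 5])); // (2 + 4) * 5 = 30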
A better approach takes only a single loop, summing the values by stepping the index in increments of two.
function evenLast(numbers) {
  let sum = 0;
  for (let i = 0; i < numbers.length; i += 2) sum += numbers[i];
  return sum * numbers[numbers.length - 1];
}
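If you prefer a functional style, the same logic fits into a reduce one-liner (a sketch assuming a non-empty array; evenLastReduce is just an illustrative name):
const evenLastReduce = (numbers) =>
  numbers.reduce((sum, n, i) => (i % 2 === 0 ? sum + n : sum), 0) * numbers[numbers.length - 1];
console.log(evenLastReduce([2, 3, 4, 5])); // 30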

function evenLast(arr) {
  var lastarr = arr.slice(arr.length - 1);
  //return lastarr;
  let newarr = [];
  for (i = 0; i < arr.length; i++) {
    if (arr[i] % 2 === 0) {
      newarr.push(arr[i]);
    }
  }
  //return newarr;
  var sum1 = 0;
  for (i = 0; i < newarr.length; i++) {
    var sum = newarr[0] * lastarr[0];
    var sum1 = newarr[1] * lastarr[0];
    //return sum;
    var sum2 = sum1 + sum;
    //i++;
  }
  return sum2;
}
console.log(evenLast([2, 3, 4, 5]));

Related

How to push both sum of even and odd result from for loop into an array?

Use a for loop to iterate from 0 to 100 and print the sum of all evens and the sum of all odds. Print the sum of evens and the sum of odds as an array.
Output: [2550, 2500]
let sumOfEven = 0;
let EvenOddArr = [];
for (let i = 0; i <= 100; i += 2) {
  sumOfEven += i;
}
console.log(sumOfEven);
let sumOfOdd = 0;
for (let i = 1; i <= 100; i += 2) {
  sumOfOdd += i;
}
console.log(sumOfOdd);
console.log(EvenOddArr);
You could use the remainder of dividing by two as the index into the result array.
const evenOddArr = [0, 0];
for (let i = 0; i <= 100; i++) evenOddArr[i % 2] += i;
console.log(evenOddArr);
You're nearly there - all you need is a couple of pushes
let sumOfEven = 0;
let EvenOddArr = [];
for (let i = 0; i <= 100; i += 2) {
  sumOfEven += i;
}
EvenOddArr.push(sumOfEven);
let sumOfOdd = 0;
for (let i = 1; i <= 100; i += 2) {
  sumOfOdd += i;
}
EvenOddArr.push(sumOfOdd);
console.log(EvenOddArr);
console.log(Array(101).fill().reduce((a,_,i)=>(a[i%2]+=i,a),[0,0]))
Here is an alternative for when you have studied JS a bit more
let sumArr = Array.from({ length: 101 })
  .reduce((acc, _, i) => (acc[i % 2] += i, acc), [0, 0]);
console.log(sumArr);
An easy-to-understand version:
let sumOfEven = 0;
let sumOfOdd = 0;
for (let i = 0; i <= 100; i++) {
  if (i % 2 === 0) {
    sumOfEven += i;
  } else {
    sumOfOdd += i;
  }
}
let evenOddArr = [sumOfEven, sumOfOdd];
console.log(evenOddArr);
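As a quick sanity check (not from any of the answers above), the expected output matches the closed-form sums: the 51 even numbers from 0 to 100 average 50, and the 50 odd numbers from 1 to 99 also average 50.
console.log([51 * 50, 50 * 50]); // [2550, 2500]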

Two Sum Leetcode

I wrote the code, but for some reason it returns the indices 1, 2, 3, even though 3 + 4 can in no way be equal to the target (6).
var twoSum = function(nums, target) {
  let sum = [];
  var n = 2;
  for (let i = 0; i < nums.length; i++) {
    for (let a = 1; a < nums.length; a++) {
      if (nums[i] + nums[a] == target) {
        sum.push(i);
        sum.push(a);
      }
    }
  }
  let unique = sum.filter((e, i) => sum.indexOf(e) === i);
  return unique/* .slice(0, n); */
};
console.log(twoSum([1,3,4,2],6))
Input: [1,3,4,2], 6
Output: [1,2]
Expected: [2,3]
As per my comment, start the inner loop at a = i + 1 to avoid summing numbers with themselves as well as to avoid checking the same combination twice, e.g. (1, 2) and (2, 1):
var twoSum = function(nums, target) {
  let sum = [];
  let n = 2;
  for (let i = 0; i < nums.length; i++) {
    for (let a = i + 1; a < nums.length; a++) {
      if (nums[i] + nums[a] === target) {
        sum.push(i);
        sum.push(a);
      }
    }
  }
  let unique = sum.filter((e, i) => sum.indexOf(e) === i);
  return unique/* .slice(0, n); */
};
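With the inner loop starting at i + 1, the sample input now returns the expected indices:
console.log(twoSum([1, 3, 4, 2], 6)); // [2, 3] because nums[2] + nums[3] = 4 + 2 = 6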

Getting values higher than average in Array - JS

I'm trying to get all the numbers that are higher than the average of a given Array.
(This goes into an HTML page, so it uses document.write.)
this is what I wrote:
sumAndBigger(arrayMaker());
function sumAndBigger(array) {
  for (i = 0; i < array.length; i++) {
    sum += array;
  }
  var equalAndBigger = []
  var avg = sum / array.length;
  for (i = 0; i < array.length; i++) {
    if (array[i] > avg) {
      equalAndBigger.push(array[i])
    }
  }
  document.write('The numbers are: ' + equalAndBigger)
}
function arrayMaker() {
  var array = [];
  for (i = 0; i < 5; i++) {
    var entrie = +prompt('Enter a number: ');
    array.push(entrie)
  }
  return array;
}
This doesn't seem to work... what am I doing wrong here?
Thanks in advance!
OK, so here is a one-liner that gets all the elements of the array that are strictly greater than the average value:
let array = [1, 2, 3, 4, 5]
let allNums = array.filter(v => v > array.reduce((x, y) => x + y) / array.length);
Explanation
array.reduce((x, y) => x + y) → sum of all elements in the array
array.reduce((x, y) => x + y) / array.length → getting the average
Output
[4, 5]
MORE DETAILED CODE
function getAverage(arr) {
  let sum = 0;
  for (let i = 0; i < arr.length; i++) {
    sum += arr[i];
  }
  return sum / arr.length;
}
function getGreaterThanAverage(arr) {
  let avg = getAverage(arr);
  let numbers = [];
  for (let i = 0; i < arr.length; i++) {
    if (arr[i] > avg) {
      numbers.push(arr[i]);
    }
  }
  return numbers;
}
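To answer the "what am I doing wrong" part directly: the original attempt never declares sum, so the first sum += array throws a ReferenceError, and even with a declaration it would add the whole array instead of the element array[i]. A minimal corrected sketch of the original function, keeping its document.write output:
function sumAndBigger(array) {
  let sum = 0;                               // was missing: sum was never declared
  for (let i = 0; i < array.length; i++) {
    sum += array[i];                         // was `sum += array`, which does not add the elements
  }
  var avg = sum / array.length;
  var equalAndBigger = [];
  for (let i = 0; i < array.length; i++) {
    if (array[i] > avg) {
      equalAndBigger.push(array[i]);
    }
  }
  document.write('The numbers are: ' + equalAndBigger);
}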

Array Helpers javascript exercise

I am trying to solve an exercise about array helpers in JavaScript; this is my code.
var numbers = [1, 2, 3, 4, 5];

function square() {
  var arraySquare = [];
  for (i = 0; i < numbers.length; i++) {
    arraySquare[i] = numbers[i] * numbers[i];
    arraySquare.push(arraySquare[i]);
  }
  return arraySquare;
}
console.log(square());

function cube() {
  var arrayCube = [];
  for (i = 0; i < numbers.length; i++) {
    arrayCube[i] = numbers[i] * numbers[i] * numbers[i];
    arrayCube.push(arrayCube[i]);
  }
  return arrayCube;
}
console.log(cube());

function arrayAverage() {
  var sum = 0;
  var average = 0;
  if (numbers === []) {
    return NaN;
  }
  else {
    for (i = 0; i < numbers.length; i++) {
      sum = sum + numbers[i];
    }
    average = sum / i;
  }
  return average;
}
console.log(arrayAverage());

function arraySum() {
  var sum = 0;
  for (i = 0; i < numbers.length; i++) {
    sum = sum + numbers[i];
  }
  return sum;
}
console.log(arraySum());

function even() {
  var arrayEven = [];
  for (i = 0; i < numbers.length; i++) {
    if (numbers[i] % 2 === 0) {
      arrayEven.push(numbers[i]);
    }
  }
  return arrayEven;
}
console.log(even());

function odd() {
  var arrayOdd = [];
  for (i = 0; i < numbers.length; i++) {
    if (numbers[i] % 2 !== 0) {
      arrayOdd.push(numbers[i]);
    }
  }
  return arrayOdd;
}
console.log(odd());
For some reason, the square() and cube() functions push the last element into the new arrays twice. Do you have any idea why this could happen?
Aside from this, the code seems to work just fine. If you notice any other problems in the code, please mention them!
Any help will be really appreciated!
Because you are setting the ith element, and after that you are pushing a new value onto the array:
arrayCube[i] = numbers[i] * numbers[i] * numbers[i];
arrayCube.push(arrayCube[i]);
You should probably just do:
arrayCube.push(numbers[i] * numbers[i] * numbers[i]);
The problem lies here
arraySquare[i] = numbers[i] * numbers[i];
arraySquare.push(arraySquare[i]);
You are updating the array twice: the function doesn't just add one extra final number, it adds two numbers on every iteration, one at index i and one at i + 1. The one at i + 1 gets overwritten on the next iteration, which is why only the final duplicate survives.
You should just keep the first line.
I checked the square function; this version worked for me:
var numbers = [1, 2, 3, 4, 5];
function square() {
  var arraySquare = [];
  var a;
  for (i = 0; i < numbers.length; i++) {
    a = numbers[i] * numbers[i];
    arraySquare.push(a);
  }
  return arraySquare;
}
console.log(square());
Hope this works for both functions.
Regards,
Eby J
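Not part of the original answers, but once the duplicate push is understood, the square and cube helpers can also be written with Array.prototype.map, which removes the index bookkeeping entirely:
var numbers = [1, 2, 3, 4, 5];
function square() {
  return numbers.map(n => n * n);      // [1, 4, 9, 16, 25]
}
function cube() {
  return numbers.map(n => n * n * n);  // [1, 8, 27, 64, 125]
}
console.log(square());
console.log(cube());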

least common multiple: What is wrong with my code?

function lcm(arr) {
  arr = arr.sort(function(a, b) {
    return a - b;
  });
  var j = 1;
  var num = arr[0];
  for (i = 1; i < arr.length; i++) {
    while (num % arr[i] !== 0) {
      j = j + 1;
      num = j * arr[0];
    }
    arr[0] = num;
  }
  return num;
}
console.log(lcm([3, 5, 6, 10]));
I am trying to find the least common multiple for a range of numbers in an array. The code works fine for an array with two items; however, for arrays with more than two items the output exceeds the expected value.
Can anyone help me find the bug in my code?
Thank you
Set j back to 1 on each pass of the outer loop over the array elements. Otherwise, when you process the next number, you start with a multiplier that is already too high.
// function that finds the least common multiple
function lcm(arr) {
  arr = arr.sort(function(a, b) {
    return a - b;
  });
  var num = arr[0];
  for (i = 1; i < arr.length; i++) {
    var j = 1;
    while (num % arr[i] !== 0) {
      j = j + 1;
      num = j * arr[0];
    }
    arr[0] = num;
  }
  return num;
}
console.log(lcm([3, 5, 6, 10]));
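For comparison (not from the original answer), the least common multiple can also be built up pairwise from the greatest common divisor, using lcm(a, b) = a * b / gcd(a, b); gcd and lcmAll below are just illustrative names:
function gcd(a, b) {
  return b === 0 ? a : gcd(b, a % b);  // Euclidean algorithm
}
function lcmAll(arr) {
  return arr.reduce((acc, n) => (acc * n) / gcd(acc, n));
}
console.log(lcmAll([3, 5, 6, 10])); // 30, the same result as the fixed function above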
