function lcm(arr) {
  arr = arr.sort(function(a, b) {
    return a - b;
  });
  var j = 1;
  var num = arr[0];
  for (i = 1; i < arr.length; i++) {
    while (num % arr[i] !== 0) {
      j = j + 1;
      num = j * arr[0];
    }
    arr[0] = num;
  }
  return num;
}
console.log(lcm([3, 5, 6, 10]));
I am trying to find the least common multiple for a range of numbers in an array. The code works fine for an array with two items, but for arrays with more than two items the output exceeds the expected value.
Can anyone help me find the bug in my code?
Thank you
Reset j to 1 on each pass through the loop over the array elements. Otherwise, when you process the next number, you start with a multiplier left over from the previous element.
// function that finds the least common multiple
function lcm(arr) {
  arr = arr.sort(function(a, b) {
    return a - b;
  });
  var num = arr[0];
  for (var i = 1; i < arr.length; i++) {
    var j = 1; // reset the multiplier for each array element
    while (num % arr[i] !== 0) {
      j = j + 1;
      num = j * arr[0];
    }
    arr[0] = num;
  }
  return num;
}
console.log(lcm([3, 5, 6, 10]));
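For reference, a common alternative (not the approach used above) builds the LCM pairwise from the GCD, using the identity lcm(a, b) = a * b / gcd(a, b). A minimal sketch, with gcd and lcmAll as my own helper names:

// Sketch: LCM of a list via the Euclidean GCD.
function gcd(a, b) {
  return b === 0 ? a : gcd(b, a % b);
}
function lcmAll(arr) {
  return arr.reduce(function (acc, n) {
    return acc * n / gcd(acc, n);
  }, 1);
}
console.log(lcmAll([3, 5, 6, 10])); // 30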
This is my solution and it passes some of the tests, but not all of them. Can anyone help me and explain why? Thank you :)
function evenLast(numbers) {
  let sum = 0;
  let lastNum = numbers.pop();
  let arr = numbers.filter(el => el % 2 === 0);
  for (let i = 0; i < arr.length; i++) {
    sum += (arr[i] * lastNum);
  }
  return sum;
}
You need to check the index, not the value:
let arr = numbers.filter((_, i) => i % 2 === 0);
And you could multiply by the last number once, at the end:
for (let i = 0; i < arr.length; i++) {
  sum += arr[i];
}
return sum * lastNum;
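Putting those two snippets back into the original function (my own assembly of the pieces above, not a separate answer):

function evenLast(numbers) {
  let lastNum = numbers.pop(); // take the last element off, as in the question
  let arr = numbers.filter((_, i) => i % 2 === 0); // keep even indices
  let sum = 0;
  for (let i = 0; i < arr.length; i++) {
    sum += arr[i];
  }
  return sum * lastNum;
}
console.log(evenLast([2, 3, 4, 5])); // (2 + 4) * 5 = 30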
A better approach needs only a single loop, summing the values by stepping the index by two.
function evenLast(numbers) {
  let sum = 0;
  for (let i = 0; i < numbers.length; i += 2) sum += numbers[i];
  return sum * numbers[numbers.length - 1];
}
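If you prefer array methods, the same logic fits in a single reduce call (a sketch, not part of the original answer):

// Sketch: sum the even-indexed values, then scale by the last element.
const evenLast = numbers =>
  numbers.reduce((sum, n, i) => (i % 2 === 0 ? sum + n : sum), 0) *
  numbers[numbers.length - 1];
console.log(evenLast([2, 3, 4, 5])); // (2 + 4) * 5 = 30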
function evenLast(arr) {
  var lastarr = arr.slice(arr.length - 1); // one-element array holding the last item
  var newarr = [];
  // collect the values at even indices
  for (var i = 0; i < arr.length; i++) {
    if (i % 2 === 0) {
      newarr.push(arr[i]);
    }
  }
  // sum them, each multiplied by the last item
  var sum = 0;
  for (var i = 0; i < newarr.length; i++) {
    sum += newarr[i] * lastarr[0];
  }
  return sum;
}
console.log(evenLast([2, 3, 4, 5])); // 30
I'm trying to get all the numbers that are higher than the average of a given array.
(This goes into an HTML page, so it uses document.write.)
This is what I wrote:
sumAndBigger(arrayMaker());

function sumAndBigger(array) {
  for (i = 0; i < array.length; i++) {
    sum += array;
  }
  var equalAndBigger = []
  var avg = sum / array.length;
  for (i = 0; i < array.length; i++) {
    if (array[i] > avg) {
      equalAndBigger.push(array[i])
    }
  }
  document.write('The numbers are: ' + equalAndBigger)
}

function arrayMaker() {
  var array = [];
  for (i = 0; i < 5; i++) {
    var entrie = +prompt('Enter a number: ');
    array.push(entrie)
  }
  return array;
}
This doesn't seem to work... what am I doing wrong here?
Thanks in advance!
OK, here is a one-liner that gets all the elements from the array that are strictly greater than the average value. (As for what's going wrong in your code: sum is never declared, so sum += array throws a ReferenceError, and even once declared it should be sum += array[i], not the whole array.)
let array = [1, 2, 3, 4, 5]
let allNums = array.filter(v => v > array.reduce((x, y) => x + y) / array.length);
Explanation
array.reduce((x, y) => x + y) → sum of all elements in the array
array.reduce((x, y) => x + y) / array.length → getting the average
Output
[4, 5]
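One caveat (my note, not the original poster's): as written, the reduce recomputes the sum for every element the filter visits. Computing the average once first avoids that:

// Sketch: compute the average a single time, then filter against it.
let avg = array.reduce((x, y) => x + y, 0) / array.length;
let allNums = array.filter(v => v > avg);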
MORE DETAILED CODE
function getAverage(arr) {
  let sum = 0;
  for (let i = 0; i < arr.length; i++) {
    sum += arr[i];
  }
  return sum / arr.length;
}

function getGreaterThanAverage(arr) {
  let avg = getAverage(arr);
  let numbers = [];
  for (let i = 0; i < arr.length; i++) {
    if (arr[i] > avg) {
      numbers.push(arr[i]);
    }
  }
  return numbers;
}
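A quick check (this call is my own illustration, not part of the original answer):

console.log(getGreaterThanAverage([1, 2, 3, 4, 5])); // [4, 5]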
I have the code below for building a matrix out of an array, and for matrix multiplication.
But when I try to multiply two matrices, e.g. mtp(matrix(2,2,[1,2,3,4]), matrix(2,2,[1,0,0,1])), it returns NaN in all places.
Please help me out.
function matrix(m, n, arr) {
  var result = {};
  for (t = 1; t <= m; t++) {
    result[t] = {};
  };
  for (i = 1; i <= m; i++)
    for (j = 1; j <= n; j++) result[i][j] = arr[m * (i - 1) + j - 1];
  return {
    "result": result,
    "m": m,
    "n": n
  };
}

function mtp(a, b) {
  if (parseInt(a.n) != parseInt(b.m)) {
    return;
  } else {
    var result = [];
    var m = parseInt(a.m);
    var n = parseInt(b.n);
    var k = parseInt(a.n);
    for (i = 1; i <= m; i++) {
      for (j = 1; j <= n; j++) {
        for (p = 1; p <= k; p++) {
          result[m * (i - 1) + j - 1] += (parseInt(a.result[i][p]) * parseInt(b.result[p][j]));
          console.log(parseInt(a.result[i][p]) * parseInt(b.result[p][j]))
        }
      }
    }
  }
  console.log(result, matrix(m, n, result).result);
}
mtp(matrix(2,2,[1,2,3,4]), matrix(2,2,[1,0,0,1]));
When you define result, it is an array of zero elements:
var result = [];
When you then try to add a number to an element of the array, that element is not defined, and adding any number to undefined gives you NaN. There are two ways to solve this: either initialize your array with the right number of zeros, or default each element to zero during the sum. I've chosen the latter below:
result[m * (i - 1) + j - 1] = (result[m * (i - 1) + j - 1]||0) + (a.result[i][p] * b.result[p][j]);
// Note here ---------------------------------------------^
I've got rid of all the unnecessary parseInt calls.
function matrix(m, n, arr) {
  var result = {};
  for (var t = 1; t <= m; t++) {
    result[t] = {};
  }
  for (var i = 1; i <= m; i++)
    for (var j = 1; j <= n; j++) result[i][j] = arr[m * (i - 1) + j - 1];
  return {
    "result": result,
    "m": m,
    "n": n
  };
}

function mtp(a, b) {
  if (a.n != b.m) {
    return;
  } else {
    var result = [];
    var m = a.m;
    var n = b.n;
    var k = a.n;
    for (var i = 1; i <= m; i++) {
      for (var j = 1; j <= n; j++) {
        for (var p = 1; p <= k; p++) {
          // default the slot to 0 before adding, so we never add to undefined
          result[m * (i - 1) + j - 1] = (result[m * (i - 1) + j - 1] || 0) + (a.result[i][p] * b.result[p][j]);
          console.log(a.result[i][p] * b.result[p][j]);
        }
      }
    }
  }
  console.log(result, matrix(m, n, result).result);
}
mtp(matrix(2,2,[1,2,3,4]), matrix(2,2,[1,0,0,1]));
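With the identity matrix on the right, this logs [1, 2, 3, 4] as the flat result (plus the matching nested form from matrix()), which is what you'd expect for [1, 2, 3, 4] times the identity.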
In your mtp() function, the elements of your result array are uninitialized: you've defined an array, but it has no actual values or even a determined length. So in your inner loop, when you use +=, you're referencing a newly created array element that has no default value (and is therefore undefined) and adding a number to that undefined value.
An undefined value plus a number is Not a Number (NaN). Try preinitializing your result array.
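For example, one minimal way to do that (a sketch; m and n here stand for the dimensions mtp already computes):

// Sketch: allocate all m * n slots up front, filled with zeros,
// so += adds to a number instead of to undefined.
var result = new Array(m * n).fill(0);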
I am trying to solve an exercise about array helpers in JavaScript; this is my code.
var numbers = [1, 2, 3, 4, 5];

function square() {
  var arraySquare = [];
  for (i = 0; i < numbers.length; i++) {
    arraySquare[i] = numbers[i] * numbers[i];
    arraySquare.push(arraySquare[i]);
  }
  return arraySquare;
}
console.log(square());

function cube() {
  var arrayCube = [];
  for (i = 0; i < numbers.length; i++) {
    arrayCube[i] = numbers[i] * numbers[i] * numbers[i];
    arrayCube.push(arrayCube[i]);
  }
  return arrayCube;
}
console.log(cube());

function arrayAverage() {
  var sum = 0;
  var average = 0;
  if (numbers === []) {
    return NaN;
  } else {
    for (i = 0; i < numbers.length; i++) {
      sum = sum + numbers[i];
    }
    average = sum / i;
  }
  return average;
}
console.log(arrayAverage());

function arraySum() {
  var sum = 0;
  for (i = 0; i < numbers.length; i++) {
    sum = sum + numbers[i];
  }
  return sum;
}
console.log(arraySum());

function even() {
  var arrayEven = [];
  for (i = 0; i < numbers.length; i++) {
    if (numbers[i] % 2 === 0) {
      arrayEven.push(numbers[i]);
    }
  }
  return arrayEven;
}
console.log(even());

function odd() {
  var arrayOdd = [];
  for (i = 0; i < numbers.length; i++) {
    if (numbers[i] % 2 !== 0) {
      arrayOdd.push(numbers[i]);
    }
  }
  return arrayOdd;
}
console.log(odd());
For some reason, the square() and cube() functions push the last element into the new arrays twice. Do you have any idea why this happens?
Aside from that, the code seems to work just fine. If you notice any other problem in the code, please mention it!
Any help will be really appreciated!
Because you are setting the ith element, and after that you are pushing a new value onto the array:
arrayCube[i] = numbers[i] * numbers[i] * numbers[i];
arrayCube.push(arrayCube[i]);
You should probably just do:
arrayCube.push(numbers[i] * numbers[i] * numbers[i]);
The problem lies here:
arraySquare[i] = numbers[i] * numbers[i];
arraySquare.push(arraySquare[i]);
You are updating the array twice: each iteration your function adds two numbers, one at index i and one at i + 1. The one at i + 1 gets overwritten on the next iteration, which is why only the final one survives. You should just keep the first line.
I checked this for the square function, and it worked for me.
var numbers = [1, 2, 3, 4, 5];

function square() {
  var arraySquare = [];
  var a;
  for (i = 0; i < numbers.length; i++) {
    a = numbers[i] * numbers[i];
    arraySquare.push(a);
  }
  return arraySquare;
}
console.log(square());
Hope this works for both functions.
Regards,
Eby J
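For what it's worth (my own sketch, not from the answers above), the built-in array methods collapse each of these helpers to a line or two:

// Sketches using map / reduce / filter over the same global `numbers` array.
const square = () => numbers.map(n => n * n);
const cube = () => numbers.map(n => n * n * n);
const arraySum = () => numbers.reduce((a, b) => a + b, 0);
const arrayAverage = () => numbers.length === 0 ? NaN : arraySum() / numbers.length;
const even = () => numbers.filter(n => n % 2 === 0);
const odd = () => numbers.filter(n => n % 2 !== 0);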
I'm trying to display all the prime numbers up to 10 and it isn't working. Can you see what I did wrong?
function findPrimeNumbers() {
  var count = 10,
    primes = [];
  for (var i = 0; i <= count; i++) {
    if (count / i === 1 || count) primes.push(i);
    else continue;
    count -= 1;
  }
  for (var i = 0, len = primes.length; i < len; i++) return primes[i];
}
console.log(findPrimeNumbers());
It only returns 0 in the console.
Here's about the simplest way to generate primes. Note that there are more efficient methods, but they are harder to understand.
function findPrimeNumbers(count) {
  var primes = [];
  for (var J = 2; J <= count; J++) {
    var possPrime = true;
    for (var K = 2, factorLim = Math.sqrt(J); K <= factorLim; K++) {
      if (J % K == 0) {
        possPrime = false;
        break;
      }
    }
    if (possPrime)
      primes.push(J);
  }
  return primes;
}
console.log(findPrimeNumbers(10));
This yields all the primes <= 10:
[2, 3, 5, 7]
See Wikipedia for an explanation.
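One of those more efficient methods is the Sieve of Eratosthenes; here is a minimal sketch (my addition, not part of the original answer):

// Sketch: Sieve of Eratosthenes. Mark composites, then collect what's left.
function sievePrimes(count) {
  var isComposite = new Array(count + 1).fill(false);
  var primes = [];
  for (var n = 2; n <= count; n++) {
    if (!isComposite[n]) {
      primes.push(n);
      // every multiple of n from n*n upward is composite
      for (var m = n * n; m <= count; m += n) isComposite[m] = true;
    }
  }
  return primes;
}
console.log(sievePrimes(10)); // [2, 3, 5, 7]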
for (var i = 0, len = primes.length; i < len; i++) return primes[i];
Here you return just the first element of the array. I think you meant something like this:
var retstr = "";
for (var i = 0, len = primes.length; i < len; i++) {
  // to improve the string format
  if (i == len - 1)
    retstr += primes[i];
  else
    retstr += primes[i] + ", ";
}
return retstr;
Hope this helps.
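(For what it's worth, return primes.join(", "); produces the same comma-separated string in one line.)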
Presumably the condition was meant to be something like:
if (count / i === 1 || count / i === count)
You don't say how it's not working, but the first thing that comes to my attention is that you're incrementing i while at the same time decrementing count, so i will never get all the way to 10.
Also, count / i will divide by zero on the first iteration as written (unless JavaScript magically handles that case in some way I'm not familiar with).
Then you "loop" through your return values, but you can only return once from a function, so of course you're only going to return the first value.
And you are returning from inside the last for loop. Remove that for loop and just return the array.
function PrimeCheck(n) { // function to check prime number
  for (var i = 2; i < n; i++) {
    if (n % i == 0) {
      return false;
    }
  }
  return true;
}

function print(x) { // function to print prime numbers
  var primeArray = [];
  for (var j = 2; j < x; j++) {
    if (PrimeCheck(j) == true) {
      primeArray.push(j);
    }
  }
  console.log(primeArray);
}
print(10); // [2, 3, 5, 7]