Returning factorials in JavaScript

I'm trying to create a script that returns the factorial of the input number as part of a challenge. When I try to run it, it returns the proper factorial, but apparently I did it wrong somehow.
It looks like this:
function FirstFactorial(num) {
    if (num > 1) {
        var x = num;
        for (var i = 1; i < x; i++) {
            num = num * i;
        }
    } else if (num === 1) {
        return 1;
    } else {
        console.log("That's not a number!");
    }
    return num;
}
Then I tried doing it like this, but it still doesn't work!
function FirstFactorial(num) {
    if (num < 0) {
        num = 0;
        console.log("You have to input a number!");
    }
    if (num === 0) {
        return 1;
    }
    return num * FirstFactorial(num - 1);
}

The most likely reason is that they expected and intended you to use recursion (a function that calls itself).
If you think about factorials, each builds on the result of the previous one, which is the classic case for using recursion.
(Note that I'm specifically not posting code doing this with recursion, because presumably the point here is for you to work out how to do it.)
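(Spoiler for anyone who has already had a go at it themselves: a minimal recursive sketch, assuming a non-negative integer input, could look like this.)
function FirstFactorial(num) {
    // base case: 0! and 1! are both 1
    if (num <= 1) {
        return 1;
    }
    // each factorial builds on the previous one: n! = n * (n - 1)!
    return num * FirstFactorial(num - 1);
}

console.log(FirstFactorial(5)); // 120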

Related

Why is the recursion in the function I declared not working?

I am trying to understand recursion.
I am stuck with the following: I need to add up all the numbers from a given number down to 1, so if I call addUp(4) it should return 10 (4+3+2+1).
I came up with the below solution but it does not count the last number (1).
I know that the solution is to change the condition to "num===0" but I don't see the difference: in my head, my solution should work as well.
function addUp(num) {
    if (num < 0) {
        return num;
    }
    return num + addUp(num - 1);
}
console.log(addUp(4)); // 9
You need to check for less than or equal to zero: you want to stop when you hit zero as well. With num < 0 the recursion runs one call too far, so addUp(-1) returns -1 and cancels out the +1, which is why you get 9. A log statement helps identify this.
Alternatively, you could test num < 1.
function addUp(num) {
    console.log(`Attempting to add: ${num}`);
    if (num <= 0) {
        return num;
    }
    console.log(`Added: ${num}`);
    return num + addUp(num - 1);
}
console.log(`Total: ${addUp(4)}`);
Code golf
The following code is only 19 bytes:
f=n=>n<1?n:n+f(n-1)
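A quick check of the one-liner:
var f = n => n < 1 ? n : n + f(n - 1);
console.log(f(4)); // 4 + 3 + 2 + 1 = 10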
You can simplify this code and write it in one expression without recursion:
function addUp(num) {
    return num * (num + 1) / 2;
}
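For example, the closed form gives the same total:
console.log(addUp(4)); // 4 * 5 / 2 = 10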
The stopping condition should be num <= 0 and not num < 0.
function addUp(num) {
    if (num <= 0) {
        return num;
    }
    return num + addUp(num - 1);
}
console.log(addUp(3));
FLOWCHART (diagram not reproduced here)

Prime Check JavaScript

What have I done wrong with this code? It doesn't print anything to the console.
Here is the description of the problem:
Implement a javascript function that accepts an array containing an integer N and uses an expression to check if given N is prime (i.e. it is divisible without remainder only to itself and 1).
var n = ['2'];
function isPrime(n) {
    if (n < 2) {
        return false;
    }
    var isPrime = true;
    for (var i = 2; i < Math.sqrt(n); i += 1) {
        if (n % i === 0) {
            isPrime = false;
        }
    }
    return isPrime;
}
return isPrime(n);
There are a couple of errors in your code.
First, you need to check every integer between 2 and Math.sqrt(n) inclusive. Your current code returns true for 4.
Second, the last line isn't inside a function, so you need to drop the return from return isPrime(n) and instead pass the result to something that prints it, such as alert or console.log.
Third, n is not a number, it's an array. You either need to make n a number, or call the function as isPrime(n[0]).
The correct code is
var n = 2;
function isPrime(n) {
    if (n < 2) {
        return false;
    }
    var isPrime = true;
    for (var i = 2; i <= Math.sqrt(n); i += 1) {
        if (n % i === 0) {
            isPrime = false;
        }
    }
    return isPrime;
}
alert(isPrime(n));
Note: You can change i += 1 to i++, and it works the same way.
n is an array; you want to access the first element of the array and convert it to a number first.
try replacing
return isPrime(n);
with
return isPrime(parseInt(n[0],10));
Your for-loop condition also needs a little modification
for (var i = 2; i <= Math.sqrt(n); i += 1) { // note: the condition must be i <= Math.sqrt(n), not i < Math.sqrt(n)
A couple of little errors:
var n = 2; //<-- no need to put n in an array
function isPrime(n) {
    if (n < 2) {
        return false;
    }
    var isPrime = true;
    for (var i = 2; i < Math.sqrt(n); i += 1) {
        if (n % i === 0) {
            isPrime = false;
        }
    }
    return isPrime;
}
isPrime(n); //<-- no need for "return"
As to no output being printed, it is because you need to use console.log.
Replace return isPrime(n); with console.log(isPrime(n));.
Full working code:
var n = ['2', '3', '4', '5', '6', '7']; // you can use as many values as you want
function isPrime(n) {
    if (n < 2) {
        return false;
    }
    var isPrime = true;
    for (var i = 2; i <= Math.sqrt(n); i += 1) { // Thanks to gurvinder372's comment
        if (n % i === 0) {
            isPrime = false;
        }
    }
    return isPrime;
}
n.forEach(function (value) { // this is so you can iterate your array with js
    console.log('is ' + value + ' prime or not? ' + isPrime(value)); // this is so you can print a message in the console
});
/*
// Another approach to iterating the data; uncomment this piece of code and comment out the one above to see it in action (both will give the same result)
for (index = 0; index < n.length; ++index) {
    console.log('is ' + n[index] + ' prime or not? ' + isPrime(n[index])); // this is so you can print a message in the console
}
*/

Euler #3 javascript error

I know this may get downvoted but I've been really frustrated for 24 hours and looking at other Euler 3 threads hasn't helped me solve this. Can someone help with my code? I think I'm very close.
function is_prime(num) {
    if (isNaN(num)) return false;
    for (i = 2; i <= Math.sqrt(num); i++) {
        if (num % i === 0) {
            return false;
        }
        else {
            return true;
        }
    }
}
// This above part CORRECTLY finds whether a number is prime or not. Problem lies with below part.
var holder = 0;
function getBiggestPrime(end) {
    for (i = 2; i <= Math.sqrt(end); i++) {
        while (is_prime(i) && (end % i === 0))
            holder = i;
        return holder;
    }
}
getBiggestPrime(13195);
console.log(holder);
The first method is not correct. The corrected version is:
function is_prime(num) {
    if (isNaN(num)) return false;
    for (var i = 2; i <= Math.sqrt(num); i++) {
        if (num % i === 0) {
            return false;
        }
    }
    return true;
}

// Same problem with the second method - the return should be after the for-loop ends (also, change the while to if):
function getBiggestPrime(end) {
    var holder = end;
    for (var i = 2; i <= Math.sqrt(end) + 1; i++) {
        if (is_prime(i) && (end % i === 0))
            holder = i;
    }
    return holder;
}
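A quick check against the input used in the question:
console.log(getBiggestPrime(13195)); // 29, since 13195 = 5 * 7 * 13 * 29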
About the second part (if I understand correctly what you're looking for): you should start from i = Math.sqrt(end) and go down with i-- until you find the biggest prime.

Javascript prime number error?

Could someone please help me find the error in this code? I'm reading through what I came up with and it logically makes sense to me each step of the way, but it's not producing the desired result. At the end I test calling the function with 6.
function is_prime(num) {
    if (isNaN(num)) return false;
    var numFactors = 0;
    for (i = 1; i <= num; i++) {
        if (num % i === 0) {
            numFactors += 1;
        }
        return numFactors;
    }
    if (numFactors === 2) {
        return true;
    }
    else {
        return false;
    }
}
console.log(is_prime(6));
You are returning from inside the for loop, so it never reaches the other statements.
I believe the problem you are having is that you are potentially returning your numFactors too early:
for (i = 1; i <= num; i++) {
    if (num % i === 0) {
        numFactors += 1;
    }
    return numFactors;
}
Here, you are returning numFactors at the end of the first iteration of your loop, so it never actually finishes the full test.
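A minimal fix that keeps the factor-counting idea is to move the count check and the return outside the loop (a sketch, assuming a non-negative integer input):
function is_prime(num) {
    if (isNaN(num)) return false;
    var numFactors = 0;
    for (var i = 1; i <= num; i++) {
        if (num % i === 0) {
            numFactors += 1; // count every divisor of num
        }
    }
    // only inspect the count once the loop has finished
    return numFactors === 2;
}

console.log(is_prime(6)); // false
console.log(is_prime(7)); // true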
Counting factors is not the right approach.
Use this instead:
function is_prime(num) {
    if (isNaN(num)) return false;
    var k = Math.sqrt(num);
    for (var i = 2; i <= k; i++) {
        if (num % i === 0) return false;
    }
    return true;
}
console.log(is_prime(6));
As you are stepping through all the numbers from 1 to num for factorization, you might as well collect them as factors and get a bit more out of your function:
function fact(num) {
    if (isNaN(num)) return false;
    var Factors = [];
    for (var i = 1; i <= num; i++) {
        if (num % i === 0) Factors.push(i);
    }
    return Factors;
}
console.log('factors: ' + fact(27));
console.log('is prime: ' + (fact(27).length === 2));
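And with a prime input the same check reports true:
console.log('is prime: ' + (fact(7).length === 2)); // is prime: true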

sorting an alphanum array using jQuery

I am trying to compare two arrays containing letters and numbers using jQuery, but a call to the function does nothing.
jQuery.compare = function (string2, string1) {
    alert("comparing")
    var i, j;
    for (i = 0, j = 0; i < string2.length || j < string1.length; ++i, ++j) {
        var n1 = 0, n2 = 0;
        if ($.isNumeric(string2[i+1])) {
            num = string2[i]
            while ($.isNumeric(string2[i+1])) {
                i++;
                num += string2[i]
            }
            n1 = 1;
        }
        if ($.isNumeric(strin1[j])) {
            num1 = string1[j]
            while ($.isNumeric(string1[j+1])) {
                j++;
                num1 += string1[j]
            }
            n2 = 1;
        }
        if (n1 == 1) {
            if (n2 = 1) {
                if (parseInt(num1) - parseInt(num) != 0)
                    return parseInt(num1) - parseInt(num);
            }
            else
                return 1;
        }
        else if (n2 = 1)
            return -1;
        else {
            if (string2[i] - string1[j] != 0)
                return string2[i] - string1[j] != 0;
        }
    }
    if (j < string1.length)
        return -1;
    else
        return 1;
}
Also, I would like to know the best way to call this function. I would like to use something like string1.compare(string2) and replace string1 with 'this' in the above code fragment.
EDIT:
This is how I call the compare function. Here, colValues is an array of strings.
$.fn.sort = function (colValues) {
    alert("sorting")
    var i;
    for (i = 0; i < colValues.length; ++i) {
        for (var j = 0; j < i - 1; ++j) {
            alert("before compare")
            if (colValues.compare(colValues[j], colValues[j + 1])) {
                var temp = colValues[j];
                colValues[j] = colValues[j + 1];
                colValues[j + 1] = temp;
            }
        }
    }
    return colValues
}
If I replace
if (colValues.compare(colValues[j], colValues[j + 1]))
with
if (colValues[j] > colValues[j + 1])
my sort function works.
I am a complete newbie at coding in jQuery. There is probably some syntactical error; I don't really want any help with the algorithm, just the syntax.
jsfiddle: check out my code here.
EDIT2:
I fixed everything, thanks to metadings.
Here's what I had to change:
1)
jQuery.compare
to
$.fn.compare
2)
if($.isNumeric(string2[i+1]))
to
if(jQuery.isNumeric(string2[i + 1]))
Somehow the
while ($.isNumeric(string1[j+1]))
syntax worked without any changes.
3)
parseInt(num1) - parseInt(num)
didn't work either, as it returned NaN. To solve this problem I defined two var variables, 'number1' and 'number2', initialized them with 0s, assigned the parseInt results to them individually, and then subtracted the new variables.
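A minimal sketch of what that third change looks like in isolation, with placeholder values standing in for the digit runs pulled out of the strings (number1 and number2 are the variable names described above; the rest is assumed):
var num1 = "12", num = "3"; // example digit runs collected from the two strings

var number1 = 0, number2 = 0; // initialized with 0s, as described
number1 = parseInt(num1, 10);
number2 = parseInt(num, 10);

if (number1 - number2 !== 0)
    console.log(number1 - number2); // 9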
