Finding the outlier in an array [closed] - javascript

I have written a function that, given an array of all odd numbers and one even number, returns the even number, and that, given an array of all even numbers and one odd number, returns the odd number.
For example, findOutlier([2,6,8,10,3]) returns 3, since it is the only odd number in the array.
I have gotten this to work, but for some reason it does not work with certain large negative numbers: it returns undefined instead of the outlier.
Here is my code:
function findOutlier(integers){
  let testingForOdds = true;
  let evenCounter = 0;
  let oddCounter = 0;
  for (let i = 0; i < 3; i++){
    if (integers[i] % 2 === 0){
      evenCounter = evenCounter + 1
      if (evenCounter === 2){
        testingForOdds = true;
      }
    }
    else if (integers[i] % 2 === 1){
      oddCounter = oddCounter + 1
      if (oddCounter === 2){
        testingForOdds = false;
      }
    }
  }
  if (testingForOdds){
    for (let i = 0; i < integers.length; i++){
      if (integers[i] % 2 === 1){
        return integers[i]
      }
    }
  } else {
    for (let i = 0; i < integers.length; i++){
      if (integers[i] % 2 === 0){
        return integers[i]
      }
    }
  }
}
findOutlier([-100000000007, 2602, 36]);
For some reason, findOutlier([-100000000007, 2602, 36]) returns undefined, while findOutlier([2,6,8,10,3]) successfully returns 3. Why is this happening?

As Michael has pointed out, the issue is that -100000000007 % 2 evaluates to -1: in JavaScript, the remainder operator takes the sign of the dividend, so a strict === 1 check never matches a negative odd number. As a side note, you can optimize your logic to reduce the number of comparisons done, like so:
function findOutlier(arr) {
  let isEven = true;
  const a = arr[0];
  const b = arr[1];
  if ([-1, 1].includes(a % 2) && [-1, 1].includes(b % 2)) {
    isEven = false;
  } else if (!(a % 2 === 0 && b % 2 === 0)) {
    const c = arr[2];
    if (c % 2 !== 0) isEven = false; // !== 0 also matches the -1 remainder of negative odds
  }
  for (let i = 0; i < arr.length; i += 1) {
    const even = arr[i] % 2 === 0;
    if (even !== isEven) return arr[i];
  }
}
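A minimal rewrite of the original logic with that fix (one possible approach, not the only one): testing n % 2 !== 0 treats both a 1 and a -1 remainder as odd, so negative values work too.

function findOutlier(integers) {
  const isOdd = (n) => n % 2 !== 0; // matches both 1 and -1 remainders
  // Inspect the first three values to decide which parity is the majority.
  const oddCount = integers.slice(0, 3).filter(isOdd).length;
  const outlierIsOdd = oddCount < 2; // at most one odd among three => the outlier is odd
  return integers.find((n) => isOdd(n) === outlierIsOdd);
}

console.log(findOutlier([-100000000007, 2602, 36])); // -100000000007
console.log(findOutlier([2, 6, 8, 10, 3]));          // 3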

Related

How to trace/understand an algorithm given the JavaScript source code? [closed]

I am trying to gain some JavaScript knowledge and I found this code challenge and its solution, but I don't quite understand the solution. I would like to get an explanation.
The Problem
2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder. What is the smallest positive number that is evenly divisible by all of the numbers from 1 to n? (smallestMult(5) should return 60)
The Solution
function smallestMulT(num) {
  let res = 0;
  let i = 1;
  let found = false;
  while (found === false) {
    res += num;
    while (res % i === 0 && i <= num) {
      if (i === num) {
        found = true;
      }
      i++;
    }
    i = 1;
  }
  return res;
}
The solution works; I just want to understand how it works. For example, why was it necessary to repeat i = 1; inside the while loop?
It's because the function checks, for each multiple of num, whether that multiple is divisible by every number from 1 up to num (that is what i counts).
So what it basically does when you put in 5 is:
It checks if 5 is divisible by 1 (i)
It checks if 5 is divisible by 2 (i); it isn't, so the inner loop stops
Sets i back to 1
Moves on to the next multiple, 10
It checks if 10 is divisible by 1 (i)
It checks if 10 is divisible by 2 (i)
It checks if 10 is divisible by 3 (i); it isn't, so the inner loop stops
and so on, until res reaches 60, which is divisible by 1, 2, 3, 4 and 5, so found becomes true.
That is why i = 1; has to be repeated inside the outer loop: each new multiple must be tested against the divisors starting from 1 again.
function smallestMulT(num) {
  let res = 0;        // the current candidate, always a multiple of num
  let i = 1;          // the divisor being tested
  let found = false;
  while (found === false) {
    res += num;       // advance to the next multiple of num
    // test res against 1, 2, ..., num; stops at the first non-divisor
    while (res % i === 0 && i <= num) {
      if (i === num) {
        found = true; // res was divisible by everything up to num
      }
      i++;
    }
    i = 1;            // reset the divisor for the next candidate
  }
  return res;
}
console.log(smallestMulT(5)); // 60
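As an aside (this is a different approach, not the solution being explained above): the same result can be computed much faster with the identity lcm(a, b) = a * b / gcd(a, b), folded over 1..num.

// Sketch: fold lcm over 1..num, using the Euclidean algorithm for gcd
function gcd(a, b) {
  return b === 0 ? a : gcd(b, a % b);
}

function smallestMult(num) {
  let result = 1;
  for (let i = 2; i <= num; i++) {
    result = (result * i) / gcd(result, i); // lcm(result, i)
  }
  return result;
}

console.log(smallestMult(5));  // 60
console.log(smallestMult(10)); // 2520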

Hirsch Array Function Returning 0 instead of desired output

Trying my hardest to improve, but it's been almost 3 hours and I still haven't figured out why this function returns a bunch of 0's.
I'm supposed to compute the Hirsch number of a given array: the greatest number n such that at least n of the values in the array are greater than or equal to n.
This is what I have so far
// Hirsch number, given an array where each position represents a paper and
// each number represents the amount of citations for that paper.
function hirschNumber(numberArray) {
  let hirschIndex = 0;
  let continueCheck = true;
  for (i = 0; i < numberArray.length; i++) {
    let count = 0;
    let citationsTreshold = hirschIndex + 1
    for (i = 0; i < numberArray.length; i++) {
      if (citationsTreshold <= numberArray[i]) {
        count++
      }
      if (numberArray.length == (i - 1)) {
        if (count >= citationsTreshold) {
          hirschIndex++
        } else if (count < citationsTreshold) {
          continueCheck = false
          return
        }
      }
      if (continueCheck = false) {
        return hirschIndex
      } else {
        console.log(hirschIndex)
      }
    }
    if (continueCheck = false) {
      return hirschIndex
    } else {
      console.log(hirschIndex)
    };
  };
  return hirschIndex;
};
console.log(hirschNumber([3,0,6,1,5]));
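For reference: the two main problems above are that both loops share the same undeclared i (so the inner loop exhausts it for the outer loop as well), and that continueCheck = false inside the if conditions is an assignment, not a comparison (=== is needed), which is why the else branch keeps logging 0. A minimal sketch of the usual h-index computation, assuming the standard definition (sort descending, count while the n-th highest value is at least n):

// Sketch: sort citation counts in descending order; the h-index is the
// largest h such that the h-th highest paper has at least h citations.
function hirschNumber(numberArray) {
  const sorted = [...numberArray].sort((a, b) => b - a);
  let h = 0;
  while (h < sorted.length && sorted[h] >= h + 1) {
    h++;
  }
  return h;
}

console.log(hirschNumber([3, 0, 6, 1, 5])); // 3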

Why does this function return undefined, even though the variable exists? [duplicate]

This question already has answers here:
Recursive function returns undefined
(3 answers)
I've tried to console.log() pretty much everything at this point. There's probably a simple reason that I'm missing, but I really can't put my finger on it. Here is the code:
const primes = [];
// Function to determine if a number is prime
function isPrime(num) {
  for (let i = 2; i < num; i++)
    if (num % i === 0 && num > 1) return false;
  return true;
}
// Cycling through the numbers up until 100 and adding the primes to the array
for (let i = 0; i < 100; i++) {
  if (isPrime(i)) primes.push(i);
}
let min = 0, max = primes.length;
// Binary search algorithm
function binarySearch(array, target) {
  let guess = Math.floor((min + max) / 2);
  if (array[guess] === target) return guess;
  if (array[guess] < target) min = guess + 1;
  else max = guess - 1;
  binarySearch(array, target);
}
console.log(binarySearch(primes, 3));
I don't understand why I get undefined as the output even though there is a return statement that is supposed to return the variable guess.
Most probably you are getting undefined because you forgot to return the result of the recursive call in the binarySearch function.
Try the following:
function binarySearch(array, target) {
  // ... implementation
  return binarySearch(array, target);
}
I hope this helps!
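One more caveat beyond the missing return: min and max live at module level, so a second call starts from stale bounds, and a target that is absent recurses forever. A self-contained sketch that carries the bounds as parameters (one possible rewrite, not the only fix):

// Recursive binary search with the bounds passed as parameters,
// so repeated calls do not share state and misses terminate.
function binarySearch(array, target, min = 0, max = array.length - 1) {
  if (min > max) return -1; // target is not present
  const guess = Math.floor((min + max) / 2);
  if (array[guess] === target) return guess;
  if (array[guess] < target) return binarySearch(array, target, guess + 1, max);
  return binarySearch(array, target, min, guess - 1);
}

console.log(binarySearch([2, 3, 5, 7, 11], 3));  // 1
console.log(binarySearch([2, 3, 5, 7, 11], 10)); // -1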

Factors function obtain least common multiple or prime number [closed]

In the factors function, we want to obtain the prime factors of the number passed as a parameter, that is, its smallest divisors; if we multiply all of them together, the result is the number itself.
factors(4) = [2,2] / 2x2 = 4
factors(18) = [2,3,3] / 2x3x3 = 18
factors(3) = [3]
For this problem I have to use a recursive approach.
You can first create a function that generates the primes in a range. Then keep dividing the number by the smallest prime that divides it, adding that prime to the result array each time.
function isPrime(num) {
  if (num === 2) return true;
  if (num === 3) return true;
  if (num % 2 === 0 || num % 3 === 0) return false;
  for (let i = 5; i < Math.sqrt(num) + 1; i += 6) {
    if (num % i === 0 || num % (i + 2) === 0) return false;
  }
  return true;
}

function createPrimes(range) {
  let res = [];
  for (let i = 2; i < range; i++) {
    if (isPrime(i)) {
      res.push(i);
    }
  }
  return res;
}

function factors(num) {
  if (isPrime(num)) return [num];
  let primes = createPrimes(num);
  let res = [];
  while (num !== 1) {
    for (let p of primes) {
      if (num % p === 0) {
        res.push(p);
        num /= p;
      }
    }
  }
  return res;
}

console.log(factors(18));
console.log(factors(4));
console.log(factors(3));
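A note on the approach (an alternative sketch, not a fix of the code above): generating a full prime list first is not strictly necessary. Plain trial division by increasing divisors produces the same factorization, because a composite divisor can never match once its own prime factors have been divided out.

// Trial division: repeatedly divide by the smallest divisor >= 2.
function factors(num) {
  const res = [];
  let d = 2;
  while (num > 1) {
    while (num % d === 0) {
      res.push(d);
      num /= d;
    }
    d++;
  }
  return res;
}

console.log(factors(18)); // [2, 3, 3]
console.log(factors(4));  // [2, 2]
console.log(factors(3));  // [3]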

adding a number to an array in JavaScript [closed]

I want to add a number to an array in JS, but only if the number is a multiple of three. Here's my code:
var numbers = [];
for (i = 1; i <= 200; i++) {
  if i % 3 === 0 {
    numbers.push(i);
  }
}
alert(numbers);
But the code does not print anything. It works fine without the if statement though, when I just add the numbers between 1 and 200...
Can you find the error?
Thanks!
The quick fix is if (i % 3 === 0) {
But why don't you write for (i = 3; i <= 200; i += 3) instead and remove the modulus check?
You need parentheses around the condition of the if statement.
var numbers = [];
for (i = 1; i <= 200; i++) {
  if (i % 3 === 0) {
  // ^           ^
    numbers.push(i);
  }
}
console.log(numbers);
var numbers = [];
for (var i = 1; i <= 200; i++) {
  if (i % 3 == 0) { // parentheses around the condition are necessary
    numbers.push(i);
  }
}

// better way to do it
var numbers = [];
var i = 3; // better to declare it here
for (; i <= 200; i += 3) {
  // if (i % 3 == 0) { not required, as per Bathsheba's answer
  numbers.push(i);
  // }
}
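For completeness, a one-expression variant (a sketch using Array.from; 66 entries because 198 is the largest multiple of 3 that does not exceed 200):

// Build the multiples of 3 up to 200 directly: 3, 6, ..., 198.
const numbers = Array.from({ length: 66 }, (_, k) => (k + 1) * 3);
console.log(numbers);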
