More efficient solution than many loop iterations with JavaScript

I have written an algorithm that returns the smallest common multiple of two integers that is evenly divisible by both, and also by all sequential integers in the range between the initial two integers.
My algorithm works well when the smallest common multiple is a reasonably low number. When the smallest common multiple is a very large number (e.g. for [1,13] it's 360360), my algorithm breaks down because it depends on running more and more for loop iterations.
I have posted my code below. It seems very long, but the problem section is near the beginning, and the rest of the code is added for context.
Is there a more efficient solution than running more and more loop iterations to return larger numbers?
// If mystery failure occurs, examine i & j loop limits.
function smallestCommons(arr) {
var firstArray = [];
var secondArray = [];
var commonNums = [];
var sequential = [];
// Sort the array from lowest to highest
arr.sort(function(a,b) {
return a-b;
});
// Assign arr values to vars so that original data stays intact
var array0 = arr[0];
var array1 = arr[1];
//console.log(arr);
// Find all multiples of both inputs.
// THIS IS THE PROBLEM SECTION
for (var i = 0; i < 1000000; i++) {
firstArray.push(arr[0] * (i+1));
}
for (var j = 0; j < 1000000; j++) {
secondArray.push(arr[1] * (j+1));
}
// THAT WAS THE PROBLEM SECTION
// Find common multiples.
for (var k = 0; k < firstArray.length; k++) {
for (var l = 0; l < secondArray.length; l++) {
if (firstArray[k] == secondArray[l]) {
commonNums.push(firstArray[k]);
} // End if.
} // End l loop.
} // End k loop.
// Find sequential numbers in range between input parameters.
for (var m = 0; m < array1; m++) {
if (array0 + 1 !== array1) {
sequential.push(array0 += 1);
} // End of if.
} // End of m loop.
// Find commonNums divisible by arr && sequential to produce a whole number.
// commonNums [ 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75]
// arr [1,5]
// sequential [ 2, 3, 4 ]
for (var n = 0; n < commonNums.length; n++) {
var match = true;
for (var o = 0; o < sequential.length; o++) {
for (var p = 0; p < arr.length; p++) {
if (commonNums[n] % arr[p] !== 0 || commonNums[n] % sequential[o] !== 0) {
match = false;
}
}
}
if(match) {
console.log(commonNums[n]);
}
} // End of n loop.
} // End function.
smallestCommons([1,13]);

What you do does not make a lot of sense and is really hard to read. All you need to know to solve this problem is a little bit of math. And this math tells you that:
lcm(a, b) = a * b / gcd(a, b)
lcm(a,b,c) = lcm(a,lcm(b,c))
which translates into something like this:
function gcd(a, b){
while (b !== 0) {
var tmp = a;
a = b;
b = tmp % b;
}
return a
}
function lcm(a, b){
return a * b / gcd(a, b);
}
function lcm_arr(arr){
var res = 1;
for (var i = 0; i < arr.length; i++){
res = lcm(res, arr[i]);
}
return res;
}
which gives you your results in less than a second:
lcm_arr([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]); // 360360

Sounds like a good place to use recursion, because you have a solid base case when the number difference is one (in your example smallestCommons([12,13]); or smallestCommons([1,2]); depending on the direction), and the rest (sequentials) would be incrementing the smaller or decrementing the bigger number in recursive calls. That could save you quite a few CPU cycles.
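For what it's worth, a minimal sketch of that recursive idea might look like the following. It is my own illustration (the helper name smallestCommonsRec is made up), and it assumes the shrinking range is folded together with lcm at each step, as in the answer above:
function gcd(a, b) { return b === 0 ? a : gcd(b, a % b); }
function lcm(a, b) { return a * b / gcd(a, b); }
function smallestCommonsRec(arr) {
  var lo = Math.min(arr[0], arr[1]);
  var hi = Math.max(arr[0], arr[1]);
  // Base case: the two numbers are adjacent, e.g. [12,13] or [1,2]
  if (hi - lo === 1) return lcm(lo, hi);
  // Otherwise shrink the range by one and combine with lcm
  return lcm(hi, smallestCommonsRec([lo, hi - 1]));
}
smallestCommonsRec([1, 13]); // 360360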

Largest Triple Products without using sort?

I implemented the Largest Triple Products algorithm, but I use sort, which makes my time complexity O(n log n). Is there a way to implement it without a temporary sorted array?
The problem:
You're given a list of n integers arr[0..(n-1)]. You must compute a list output[0..(n-1)] such that, for each index i (between 0 and n-1, inclusive), output[i] is equal to the product of the three largest elements out of arr[0..i] (or equal to -1 if i < 2, as arr[0..i] then includes fewer than three elements).
Note that the three largest elements used to form any product may have the same values as one another, but they must be at different indices in arr.
Example:
var arr_2 = [2, 4, 7, 1, 5, 3];
var expected_2 = [-1, -1, 56, 56, 140, 140];
My solution:
function findMaxProduct(arr) {
// Write your code here
if(!arr || arr.length === 0) return [];
let helper = arr.slice();
helper.sort((a,b)=>a-b); // THIS IS THE SORT
let ans = [];
let prod = 1;
for(let i=0; i<arr.length; i++) {
if(i < 2) {
prod *= arr[i];
ans.push(-1);
}
else {
if(i === 3) {
prod *= arr[i];
ans.push(prod);
} else if(arr[i] < helper[0]) {
ans.push(prod);
} else {
const min = helper.shift();
prod /= min;
prod *= arr[i];
ans.push(prod);
}
}
}
return ans;
}
Thanks
You don't need to sort it. You just maintain an array of the largest three elements at each index.
For the first three elements it is simple: you just assign their product to the third element in the result.
For the next elements, you add the current element to the three-largest-element array, sort it, and take the elements from 1 to 3 (the largest three), assigning their product at that index in the result array. This also leaves the three-element array holding the current largest three.
Complexity:
The sort and slice of the three-element array is O(1), because at most 4 elements are ever in it.
Overall complexity is O(n).
You can do it as follows :
function findMaxProduct(arr) {
if(!arr) return [];
if (arr.length < 3) return arr.slice().fill(-1)
let t = arr.slice(0,3)
let ans = arr.slice().fill(-1,0,2) //fill first two with -1
ans[2] = t[0]*t[1]*t[2];
for(let i=3; i<arr.length; i++) {
t.push(arr[i]);
t = t.sort((a, b) => a - b).slice(1,4); // numeric sort; the default sort compares as strings
ans[i] = t[0]*t[1]*t[2];
}
return ans;
}
I am keeping the array ordered (manually). Then just get the first 3 elements.
function findMaxProduct(arr) {
let results = [];
let heap = [];
for (let i = 0; i < arr.length; i++) {
// Insert the new element in the correct position
for (let j = 0; j < heap.length; j++) {
if (arr[i] >= heap[j]) {
heap.splice(j, 0, arr[i]);
break;
}
}
// No position found, insert at the end
if (heap.length != i + 1) {
heap.push(arr[i]);
}
if (i < 2) {
results.push(-1);
} else {
results.push(heap[0] * heap[1] * heap[2]);
}
}
return results;
}
You can make an array that holds the three currently largest integers, and update that array as you pass through the original array. That way you always have the three currently largest numbers, and you can solve this with O(n) time complexity.
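For illustration, a minimal sketch of that idea might look like this (my own code, not the answerer's; the name findMaxProductTop3 is made up):
function findMaxProductTop3(arr) {
  const top = []; // holds at most the three largest values seen so far, ascending
  const result = [];
  for (let i = 0; i < arr.length; i++) {
    top.push(arr[i]);
    top.sort((a, b) => a - b);
    if (top.length > 3) top.shift(); // drop the smallest of the four
    result.push(i < 2 ? -1 : top[0] * top[1] * top[2]);
  }
  return result;
}
findMaxProductTop3([2, 4, 7, 1, 5, 3]); // [-1, -1, 56, 56, 140, 140]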
I think there's a faster and more efficient way to go about this. This is a similar thought process as #Q2Learn, using Python; just faster:
def findMaxProduct(arr):
#create a copy of arr
solution = arr.copy()
# make first 2 elements -1
for i in range(0,2):
solution[i] = -1
#for each item in the copy starting from index 2, multiply it by the items at the two preceding indices (notice how each index of arr being multiplied is offset by 2, 1 and then 0, to accommodate each move)
for i in range(2, len(arr)):
solution[i] = arr[i-2] * arr[i-1] * arr[i]
return solution
check = findMaxProduct(arr)
print(check)
Single Scan Algorithm O(n)
We don't necessarily need to sort the given array to find the maximum product. Instead, we can just keep track of the three largest values (x, y, z) at each stage of the iteration:
JavaScript:
function findMaxProduct(arr) {
  let results = []
  let x = 0
  let y = 0
  let z = 0
  for (let i = 0; i < arr.length; i++) {
    const n = arr[i]
    if (n > x) {
      z = y
      y = x
      x = n
    }
    if (n < x && n > y) {
      z = y
      y = n
    }
    if (n < y && n > z) {
      z = n
    }
    const ans = x * y * z
    if (ans === 0) {
      results.push(-1)
    } else {
      results.push(ans)
    }
  }
  return results;
}
Python:
def findMaxProduct(arr):
results = []
if not arr:
return []
x = 0
y = 0
z = 0
for i, n in enumerate(arr):
if n > x:
z = y
y = x
x = n
if n < x and n > y:
z = y
y = n
if n < y and n > z:
z = n
ans = x*y*z
if ans == 0:
results.append(-1)
else:
results.append(ans)
print(results)
public int[] LargestTripleProducts(int[] input)
{
var ansArr = new int[input.Length];
var firstLargetst = input[0];
var secondLargetst = input[1];
ansArr[0] = ansArr[1] = -1;
for (int i = 2; i < input.Length; i++)
{
ansArr[i] = firstLargetst * secondLargetst * input[i];
if (firstLargetst < input[i] && firstLargetst < secondLargetst)
{
firstLargetst= input[i];
continue;
}
if (secondLargetst < input[i] && secondLargetst < firstLargetst)
{
secondLargetst= input[i];
}
}
return ansArr;
}
Python solution based on #SomeDude answer above. See explanation there.
def findMaxProduct(arr):
if not arr:
return None
if len(arr) < 3:
for i in range(len(arr)):
arr[i] = -1
return arr
three_largest_elem = arr[0:3]
answer = arr.copy()
for i in range(0, 2):
answer[i] = -1
answer[2] = three_largest_elem[0] * three_largest_elem[1] * three_largest_elem[2]
for i in range(3, len(arr)):
three_largest_elem.append(arr[i])
three_largest_elem = sorted(three_largest_elem)
three_largest_elem = three_largest_elem[1:4]
answer[i] = three_largest_elem[0] * three_largest_elem[1] * three_largest_elem[2]
return answer  # Time: O(1) per step since the helper list has at most 4 elements, so overall O(n) | Space: O(1)
Python has its built-in heapq package; take a look at it.
Credit: Martin
# Helper function for any type of calculations
import math
# Heap algorithm
import heapq
# Create an empty list to append output values
output = []
def findMaxProduct(arr):
out = []
h = []
for e in arr:
heapq.heappush(h, e)
if len(h) < 3:
out.append(-1)
else:
if len(h) > 3:
heapq.heappop(h)
out.append(h[0] * h[1] * h[2])
return out
Hope this helps!

JavaScript program to find common elements in two arrays

Recently I had an interview question as follows:
Let us consider we have two sorted arrays of different lengths. We need to find the common elements in the two arrays.
var a=[1,2,3,4,5,6,7,8,9,10];
var b = [2,4,5,7,11,15];
for(var i=0;i<a.length;i++){
for(var j=0;j<b.length;j++){
if(a[i]==b[j]){
console.log(a[i],b[j])
}
}
}
I wrote it like the above. The interviewer then said: now assume a has 2000 elements and b has 3000 elements. How would you write it in a more efficient way?
Please explain your answers with sample code, so I can understand more clearly.
The easiest way!!
var a = [1,2,3,4,5,6,7,8,9,10];
var b = [2,4,5,7,11,15];
for(let i of a){
if(b.includes(i)){
console.log(i)
}
}
--------- OR --------------
var c = a.filter(value => b.includes(value))
console.log(c)
Since the arrays are sorted, binary search is the key.
Basically, you're searching an item in an array.
You compare the item against the middle index of the array (length / 2)
If both are equal, you found it.
If the item is less than the one at the middle index of the array, compare the item against the element at index length / 4 -> ((0 + length / 2) / 2); if it's greater, against the element at index ((length / 2) + length) / 2 (the middle of the upper part), and so on.
That way, if for example you have to search for an item in a 40 000 element array, at worst you find out that the item isn't in the array within about 16 comparisons:
I'm searching for "something" in an array with 40 000 indexes, minimum index where I can find it is 0, the maximum is 39999.
"something" > arr[20000]. Let's assume that. I know that now the minimum index to search is 20001 and the maximum is 39999. I'm now searching for the middle one, (20000 + 39999) / 2.
Now, "something" < arr[30000], it limits the search from indexes 20001 to 29999. (20000 + 30000) / 2 = 25000.
"something" > arr[25000], I have to search from 25001 to 29999. (25000 + 30000) / 2 = 27500
"something" < arr[27500], I have to search from 25001 to 27499. (25000 + 27500) / 2 = 26250
"something" > arr[26250], I have to search from 26251 to 27499. (26250 + 27500) / 2 = 26875
"something" < arr[26875], I have to search from 26251 to 26874. (26250 + 26875) / 2 = 26563
And so on... Of course, you have to round and stuff to avoid floating indexes
var iteration = 1;
function bSearch(item, arr)
{
var minimumIndex = 0;
var maximumIndex = arr.length - 1;
var index = Math.round((minimumIndex + maximumIndex) / 2);
while (true)
{
++iteration;
if (item == arr[index])
{
arr.splice(0, minimumIndex);
return (true);
}
if (minimumIndex == maximumIndex)
{
arr.splice(0, minimumIndex);
return (false);
}
if (item < arr[index])
{
maximumIndex = index - 1;
index = Math.ceil((minimumIndex + maximumIndex) / 2);
}
else
{
minimumIndex = index + 1;
index = Math.floor((minimumIndex + maximumIndex) / 2);
}
}
}
var arrA = [1,2,3,4,5,6,7,8,9,10]; // e.g. the question's array a
var arrB = [2,4,5,7,11,15]; // e.g. the question's array b
for (var i = 0; i < arrA.length; ++i)
{
if (bSearch(arrA[i], arrB))
console.log(arrA[i]);
}
console.log("number of iterations : " + iteration);
You could use a nested approach by checking the index of each array and find the values by incrementing the indices. If equal values are found, increment both indices.
Time complexity: max. O(n+m), where n is the length of array a and m is the length of array b.
var a = [1, 2, 3, 4, 5, 6, 8, 10, 11, 15], // left side
b = [3, 7, 8, 11, 12, 13, 15, 17], // right side
i = 0, // index for a
j = 0; // index for b
while (i < a.length && j < b.length) { // prevent running forever
while (a[i] < b[j]) { // check left side
++i; // increment index
}
while (b[j] < a[i]) { // check right side
++j; // increment
}
if (a[i] === b[j]) { // check equalness
console.log(a[i], b[j]); // output or collect
++i; // increment indices
++j;
}
}
Since both arrays are sorted, just save the latest match index, then start your inner loop from that index.
var lastMatchedIndex = 0;
for(var i=0;i<a.length;i++){
for(var j=lastMatchedIndex; j<b.length; j++){
if(a[i]==b[j]){
console.log(a[i],b[j]);
lastMatchedIndex = j;
break;
}
}
}
=================
UPDATE :
As Xufox mentioned in the comments, if a[i] is lower than b[i], you can break the loop, since there is no point in continuing.
var lastMatchedIndex = 0;
for(var i=0;i<a.length;i++){
if(a[i]<b[i]){
break;
}
for(var j=lastMatchedIndex; j<b.length; j++){
if(a[i]==b[j]){
console.log(a[i],b[j]);
lastMatchedIndex = j;
break;
}
if(a[i]<b[j]){
lastMatchedIndex = j;
break;
}
}
}
An optimal strategy would be one where you minimize the amount of comparisons and array readings.
Theoretically, what you want is to alternate which list you are progressing through, so as to avoid unnecessary comparisons. Given that the lists are sorted, we know that no number to the left of any index in a list can ever be larger than the one at the current index.
Assuming the following list A = [1,5], list B = [1,1,3,4,5,6] and indexes a and b both starting at 0, you would want your code to go like this:
A[a] == 1, B[b] == 1
A[a] == B[b] --> add indexes to results and increase b (B[b] == 1)
A[a] == B[b] --> add indexes to results and increase b (B[b] == 3)
A[a] < B[b] --> don't add indexes to results and increase a (A[a] == 5)
A[a] > B[b] --> don't add indexes to results and increase b (B[b] == 4)
A[a] > B[b] --> don't add indexes to results and increase b (B[b] == 5)
A[a] == B[b] --> add indexes to results and increase b (B[b] == 6)
A[a] < B[b] --> don't add indexes to results and increase a (A is at the end, so we terminate and return results)
Below is my JavaScript performing the above described algorithm:
//Parameters
var listA = [];
var listB = [];
//Parameter initialization
(function populateListA() {
var value = 0;
while (listA.length < 200) {
listA.push(value);
value += Math.round(Math.random());
}
})();
(function populateListB() {
var value = 0;
while (listB.length < 300) {
listB.push(value);
value += Math.round(Math.random());
}
})();
//Searcher function
function findCommon(listA, listB) {
//List of results to return
var results = [];
//Initialize indexes
var indexA = 0;
var indexB = 0;
//Loop through list a
while (indexA < listA.length) {
//Get value of A
var valueA = listA[indexA];
var result_1 = void 0;
//Get last result or make a first result
if (results.length < 1) {
result_1 = {
value: valueA,
indexesInA: [],
indexesInB: []
};
results.push(result_1);
}
else {
result_1 = results[results.length - 1];
}
//If higher than last result, make new result
//Push index to result
if (result_1.value < valueA) {
//Make new object
result_1 = {
value: valueA,
indexesInA: [indexA],
indexesInB: []
};
//Push to list
results.push(result_1);
}
else {
//Add indexA to list
result_1.indexesInA.push(indexA);
}
//Loop through list b
while (indexB < listB.length) {
//Get value of B
var valueB = listB[indexB];
//If b is less than a, move up list b
if (valueB < valueA) {
indexB++;
continue;
}
//If b is greather than a, break and move up list a
if (valueB > valueA) {
break;
}
//If b matches a, append index to result
result_1.indexesInB.push(indexB);
//Move up list B
indexB++;
}
//Move up list A
indexA++;
}
//Return all results with values in both lines
return results.filter(function (result) { return result.indexesInB.length > 0; });
}
//Run
var result = findCommon(listA, listB);
//Output
console.log(result);
We could iterate one array and find the duplicates in the other, but each time we find a match, we move to the matched element + 1 for the next iteration of the nested loop. It works because both arrays are sorted, so with each match the range left to compare gets shorter (from left to right).
We could also break the nested loop when the element of the second array is greater than the first (it's shorter from right to left), because we will never find a match there (since the array is ordered, only greater values remain). Here is an example finding duplicates in two arrays of 10k elements; it takes roughly 15 milliseconds:
var arr = [];
var arr2 = [];
for(let i = 0; i<9999; i++){
arr.push(i);
arr2.push(i+4999)
}
var k = 0;//<-- the index we start to compare
var res = [];
for (let i = 0; i < arr2.length; i++) {
for (let j = k; j < arr.length; j++) {
if (arr2[i] === arr[j]) {
res.push(arr2[i]);
k = j + 1;//<-- updates the index
break;
} else if (arr[j] > arr2[i]) {//<-- there is no need to keep going
break;
}
}
}
console.log(res.length)
I did not print res, because it has 5000 elements.
You can build a hash with first array (irrespective of they are sorted or not) and iterate the second array and check for existence in the hash!
let arr1 = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150],
arr2 = [15,30,45,60,75,90,105,120,135,150,165],
hash = arr1.reduce((h,e)=> (h[e]=1, h), {}), //iterate first array once
common = arr2.filter(v=>hash[v]); //iterate secod array once
console.log('Common elements: ', common);
Not sure but this may help
let num1 = [2, 3, 6, 6, 5];
let num2 = [1, 3, 6, 4];
var array3 = num1.filter((x) => {
return num2.indexOf(x) != -1
})
console.log(array3);
I sometimes find it convenient to turn one list into a hashset.
var hashA = {};
for(var i=0; i<a.length; i++) {hashA[a[i]] = true;}
then you can search the hashset.
for(var i=0; i<b.length; i++) {if(hashA[b[i]]) {console.log(b[i]);}}
This isn't as fast as the binary search, of course, because you have to take time to build the hashset, but it's not bad, and if you need to keep the list and do a lot of future searching it might be the best option. Also, I know JavaScript objects aren't really just hashsets; it's complicated, but it mostly works pretty well.
Honestly though, for 3000 items I wouldn't change the code. That's still not big enough to be an issue; it will run in like 30ms. So it also depends on how often it's going to run. Once an hour? Forget about it. Once per millisecond? Definitely gotta optimize that.
If we are talking about the algorithm to find common elements between two arrays, then here is my opinion.
function common(arr1, arr2) {
var newArr = [];
newArr = arr1.filter(function(v){ return arr2.indexOf(v) >= 0;})
newArr = newArr.concat(arr2.filter(function(v){ return newArr.indexOf(v) >= 0;})); // assign the result; concat does not modify newArr in place
return newArr;
}
But if you are also going to think about performance, then you should try other approaches as well.
First check the performance of different JavaScript loops here; it will help you figure out the best way:
https://dzone.com/articles/performance-check-on-different-type-of-for-loops-a
https://hackernoon.com/javascript-performance-test-for-vs-for-each-vs-map-reduce-filter-find-32c1113f19d7

Find the largest subarray length with delimiter

I'm trying to solve this problem:
Given two parameters: an array a and integer i, find the largest array length where the sum of all elements is <= i.
For example, having the following array: [3, 1, 2, 1], and i = 4, all the combinations that are <= i are: [3], [1], [2], [3, 1], [1, 2], [1, 2, 1]. The largest subarray is [1, 2, 1], so the return of the function should be 3 (the array length).
What would be a an efficient approach to solve this problem?
This is my algorithm until now, but I know that I'm missing something:
function sumArray(a) {
return a.reduce((a, b) => a + b, 0)
}
function maxLength(a, i) {
let max = 0
let array = [a[0]]
a.splice(1, a.length).forEach(number => {
array.push(number)
if (sumArray(array) <= k) max = array.length
else array.splice(array.indexOf(Math.max.apply(null, array)), 1)
})
return max
}
Here's how I would do it.
First, we'll get the longest sub-array by taking only the smallest elements out of the original, because that way the sum will be smallest possible as we go along. So:
const original = [3, 1, 2, 1];
const maxsum = 4;
// To make sure we take only the smallest, let's just
// sort the array straight away. That way the smallest
// element will always be the first.
// The .slice call is to make a copy, so we don't change
// the original:
const sorted = original.slice().sort((a, b) => a - b); // numeric compare, since the default sort is lexicographic
// Here's your sum function. Looks legit.
function sumArray(a) {
return a.reduce((a, b) => a + b, 0)
}
// Now take items from the front of the sorted array and
// put them in the new array until either the original is
// empty or the max is reached.
let subarray = [];
while (sorted.length) {
// Get the next smallest element. It's always the
// first one because of the sort.
const smallest = sorted.shift();
// If the sum of what we have plus this new element
// is bigger than the max, our work is done:
if (sumArray(subarray) + smallest > maxsum) break;
// Otherwise, add it to our sub array and continue.
subarray.push(smallest)
}
// Once that loop has run, either we ran out of elements,
// or we hit the max. Either way, our job is done.
console.log("Original array:", original);
console.log("Maximal subset:", subarray);
console.log("Maximal subset length:", subarray.length);
Finally, if you want to get fancy, you can even do this with a single .reduce call:
const original = [3, 1, 2, 1];
const maxsum = 4;
const maximalSubset = original.slice().sort((a, b) => a - b).reduce((subset, current) => {
if (subset.reduce((s, c) => s + c, 0) + current <= maxsum) subset.push(current);
return subset;
}, []);
console.log("Orignal:", original);
console.log("Maximal subset:", maximalSubset);
console.log("Maximal subset length:", maximalSubset.length);
While shorter, that second snippet has the downside that we have to iterate the entire array before we get the result, whereas the first one will stop once the maximum is reached.
EDIT:
It turns out that the subarray needs to be a contiguous piece of the original, so changing the order of the elements won't work; we need to make sure the result is a contiguous slice of the original.
To do that, instead, just check each subslice of the array, and keep the best one:
let original = [74,659,931,273,545,879,924,710,441,166,493,43,988,504,328,730,841,613,304,170,710,158,561,934,100,279,817,336,98,827,513,268,811,634,980,150,580,822,968,673,394,337,486,746,229,92,195,358,2,154,709,945,669,491,125,197,531,904,723,667,550];
const maxsum = 22337;
function arraySum(arr) {
return arr.reduce((p, c) => p + c, 0);
}
// Double for loop will do the trick.
let bestSoFar = [];
for (let i = 0; i < original.length; i++) {
for (let j = i+1; j <= original.length; j++) {
if (j-i > bestSoFar.length && arraySum(original.slice(i, j)) <= maxsum) {
bestSoFar = original.slice(i, j);
}
}
}
console.log("Longest continuous subarray is:", bestSoFar.length);
A brute force approach is likely the best solution to this problem. Start at each entry and see how far you can go before arriving at a sum > i and if it's better than the best you've seen so far save it. I provided a sample Java solution below, I haven't actually run it so one or two of my indices may be off but I think you can get the gist. Runtime is O(n^2), memory is O(n) (both occurring with, for example, #getMaxSubArray(new int[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, 1000000))
private int[] getMaxSubArray(int[] a, int i) {
int minIndex = 0;
int maxIndex = -1;
for (int j = 0; j < a.length; j+=1) {
if (a[j] > i) {
continue;
}
int sum = a[j];
for (int k = j + 1; k < a.length; k++) {
if (sum + a[k] > i) {
if ((k - 1) - j > maxIndex - minIndex) {
maxIndex = k - 1;
minIndex = j;
}
break;
}
sum += a[k];
}
}
if (maxIndex - minIndex < 0) {
return null;
}
int[] result = new int[maxIndex - minIndex + 1];
for (int p = minIndex; p <= maxIndex; p += 1) {
result[p - minIndex] = a[p];
}
return result;
}
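Since the question is tagged JavaScript, here is a rough JavaScript sketch of the same brute-force idea, returning just the length (my own translation, not the answerer's code):
function maxLengthBruteForce(a, i) {
  let best = 0;
  for (let start = 0; start < a.length; start++) {
    let sum = 0;
    // Extend to the right while the running sum stays <= i
    for (let end = start; end < a.length; end++) {
      sum += a[end];
      if (sum > i) break;
      best = Math.max(best, end - start + 1);
    }
  }
  return best;
}
maxLengthBruteForce([3, 1, 2, 1], 4); // 3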
This is my solution. It will return the max length of the subarray. Can you please take a look, guys?
function maxLength(a, k) {
const sortedArray = a.sort((i,j) => i - j);
let sum = 0;
let length = 0;
const subArray = [];
for (let i=0; i < sortedArray.length; i++) {
sum = sum + sortedArray[i];
if (sum <= k) {
length++;
subArray.push(sortedArray[i]);
} else {
return length;
}
}
return length;
}

Intersection of N sorted integer arrays with limit

Given N sorted arrays of integers (no duplicates), I'd like to calculate the first limit integers in their intersection.
For example, given the following arrays:
[2, 5, 7, 8, 10, 12, 13, 15, 20, 24]
[3, 4, 5, 6, 9, 10, 11, 17, 20]
[1, 2, 3, 5, 6, 10, 12, 20, 23, 29]
the intersection is [5, 10, 20], so if limit = 2, the result should be [5, 10].
The given arrays should not be mutated.
My attempt is below. Playground here.
Is there a more efficient (faster) way to achieve this?
Would appreciate a jsperf comparison.
function intersection(sortedArrays, limit) {
var arraysCount = sortedArrays.length;
var indices = sortedArrays.map(function(array) { return 0; });
var values, maxValue, valuesAreSame, reachedEnd, i, result = [];
while (true) {
reachedEnd = indices.some(function(index, i) {
return index === sortedArrays[i].length;
});
if (reachedEnd) {
return result;
}
values = sortedArrays.map(function(array, i) { return array[indices[i]]; });
valuesAreSame = values.every(function(value, i) { return value === values[0]; });
if (valuesAreSame) {
result[result.length] = values[0];
if (result.length === limit) {
return result;
}
for (i = 0; i < arraysCount; i++) {
indices[i]++;
}
} else {
maxValue = Math.max.apply(null, values);
for (i = 0; i < arraysCount; i++) {
if (values[i] < maxValue) {
indices[i]++;
}
}
}
}
}
console.log(intersection([[0, 3, 8, 11], [1, 3, 11, 15]], 1));
// => [3]
The first challenge is to make the function correct. Once it's correct, we can worry about the speed.
There are a few things which could trip-up a function like this:
NaN
Bad limits
Repeated numbers
Only 1 input array (or none at all)
Your original function can handle repeated numbers, such as [[9,9,9,9],[9,9,9]], but gets stuck in an infinite loop if any value is NaN, and handles a limit of 0 as if there were no limit at all.
Here's my (Mk3) attempt:
function intersection( arrs, limit ) {
var result = [], posns = [];
var j, v, next, n = arrs.length, count = 1;
if( !n || limit <= 0 ) {
return result; // nothing to do
}
if( n === 1 ) {
// special case needed because main loop cannot handle this
for( j = 0; j < arrs[0].length && result.length < limit; ++ j ) {
v = arrs[0][j];
if( v === v ) {
result.push( v );
}
}
return result;
}
for( j = 0; j < n; ++ j ) {
if( !arrs[j].length ) {
return result; // no intersection
}
posns[j] = 0;
}
next = arrs[n-1][0];
++ posns[n-1];
while( true ) {
for( j = 0; j < n; ++ j ) {
do {
if( posns[j] >= arrs[j].length ) {
return result; // ran out of values
}
v = arrs[j][posns[j]++];
} while( v < next || v !== v );
if( v !== next ) {
count = 1;
next = v;
} else if( (++ count) >= n ) {
result.push( next );
if( result.length >= limit ) {
return result; // limit reached
}
if( posns[j] >= arrs[j].length ) {
return result; // ran out of values
}
next = arrs[j][posns[j]++];
count = 1;
}
}
}
}
(fiddle: http://jsfiddle.net/kn2wz2sc/4/)
This works in much the same way as your original method, but with several optimisations. It always knows which number it is looking for next, and will quickly iterate through each array until it finds a number which is at least that big. If the number is too big, it will update the number it is looking for.
In Mk2 I took some inspiration from Casey's method of counting matches as it goes instead of checking from 0-n each time, which allows it to short-cut some comparisons (and since Casey is now using indices, both methods have become very similar). In Mk3 I've made some more micro-optimisations, incrementing the indexes eagerly so that it doesn't need an inner loop.
This is safe against all the criteria I listed above (it ignores NaN since NaN!=NaN and therefore will never be in the intersection), isn't limited to numbers, and will exit quickly once any limit is reached.
A jsperf shows that Mk3 is the fastest method so far: http://jsperf.com/sorted-intersect/5 (and it's still safe against duplicates and NaN).
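For reference, calling it with the arrays from the question should produce the result described there:
console.log(intersection([
  [2, 5, 7, 8, 10, 12, 13, 15, 20, 24],
  [3, 4, 5, 6, 9, 10, 11, 17, 20],
  [1, 2, 3, 5, 6, 10, 12, 20, 23, 29]
], 2)); // [5, 10]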
Here's another algorithm, where the idea is that we count how many times we see each number. Once we see it arrs.length times, we know that it's in the intersection. If it's missing from even one list, it's not in the intersection, and we can skip to the next number in that list. It turns out to be a lot faster!
This method mutates the array, but is easier to read.
function intersection(arrs, limit) {
var intersections = [];
// Keep track of how many times we've seen the largest element seen so far.
var largest = -Infinity;
var count = 0;
while (intersections.length < limit) {
for (var i = 0; i < arrs.length; i++) {
// Drop elements less than `largest`.
while (arrs[i].length && arrs[i][0] < largest)
arrs[i].shift();
// Ignore repeated elements (not needed if you don't have repeated elements).
while (arrs[i].length >= 2 && arrs[i][0] == largest && arrs[i][1] == largest)
arrs[i].shift();
// If we ran out of elements, we're done.
if (!arrs[i].length)
return intersections;
// Look at the next element.
var next = arrs[i].shift();
if (next == largest)
count++;
else {
count = 1;
largest = next;
}
// Once we see it enough times, we can be sure it's in the intersection!
if (count == arrs.length)
intersections.push(largest);
}
}
return intersections;
}
This method doesn't, but it's harder to read.
function intersection(arrs, limit) {
var intersections = [];
var indices = [];
for (var i = 0; i < arrs.length; i++)
indices[i] = 0;
// Keep track of how many times we've seen the largest element seen so far.
var largest = -Infinity;
var count = 0;
while (intersections.length < limit) {
for (var i = 0; i < arrs.length; i++) {
// Skip past elements less than `largest`.
while (indices[i] < arrs[i].length && arrs[i][indices[i]] < largest)
indices[i]++;
// If we ran out of elements, we're done.
if (indices[i] >= arrs[i].length)
return intersections;
// Look at the next element.
var next = arrs[i][indices[i]++];
if (next == largest)
count++;
else {
count = 1;
largest = next;
}
// Once we see it enough times, we can be sure it's in the intersection!
if (count == arrs.length)
intersections.push(largest);
}
}
return intersections;
}
Faster (but by a long shot not as fast as the other answers):
function intersectMultiple(sortedArrays, limit) {
var set = {}, result = [],
a = sortedArrays.length,
l = Math.max.apply(null, sortedArrays.map(function (a) {
return a.length;
})), i, j, c = 0, val;
for (i = 0; i < l && c < limit; i++) {
for (j = 0; j < a && c < limit; j++) {
val = sortedArrays[j][i];
if (!set.hasOwnProperty(val)) set[val] = 0;
if (++set[val] === a) result[c++] = val;
}
};
return result;
}
and
var s = [
[2, 5, 7, 8, 10, 12, 13, 15, 20, 24],
[3, 4, 5, 6, 9, 10, 11, 17, 20],
[1, 2, 3, 5, 6, 10, 12, 20, 23, 29]
];
intersectMultiple(s, 2);
// [5, 10]
http://jsperf.com/intersect-multiple

Find the N'th highest number in unsorted array

Today in an interview, I was told to write a program which will output the nth highest number in an unsorted array.
I solved this using JavaScript; the program is as follows:
var fn50 = function(){
var reverseSort = function(myArray,highest){
var x = 0,
y = 0,
z = 0,
temp = 0,
totalNum = myArray.length, // total numbers in array
flag = false, // is the numbers sorted in reverse while iteration
isAchieved = false; // whether we achieved the nth highest
while(x < totalNum){
y = x + 1; // start comparing 'yth' number which is next to 'xth' number.
if(y < totalNum){
// start comparing 'xth' with the next number, and if 'xth' number less than its next position number, just swipe them
for(z = y; z < totalNum; z++){
if(myArray[x] < myArray[z]){
temp = myArray[z];
myArray[z] = myArray[x];
myArray[x] = temp;
flag = true; // if number swiping done ?
}else{
continue;
}
}
}
if(flag){
flag = false;
}else{
x++; // x holds the max number in series, now move to next position to find next highest number
if(x > highest){ // if x is what the desired max number which we want flag it and break the loop to escape further iteration.
isAchieved = true;
}
}
if(isAchieved){
break;
}
}
print(myArray[(highest - 1)]);
};
reverseSort([12,56,78,34,11,100,95],4); // passing the unsorted array of number's, and finding the 4th highest number
};
fn50();
I got the desired output, i.e. the answer 56 from the above array, which is the 4th highest number.
But the interviewer asked for a better solution.
Can you tell me or give me a hint, how can there be a better solution.
Some data structure technique ?
Sorting and then selecting the kth highest number takes O(n log n) time, where n is the number of elements. In the literature there is the median of medians algorithm, which allows us to select the kth highest or smallest element in linear time, no matter what value k has. You could find out whether the interviewer had this kind of algorithm in mind by asking if the desired element could be the median of the array; the median is the element at position n / 2, which is considered the hardest case.
But for an interview, it's a complicated algorithm. If k is in general small, you can apply the following algorithm, based on the structure of a heap. You convert the array into a heap in linear time, then you extract the largest element k times. This takes O(n + k * log(n)) time, which for small k = o(n / log(n)) is linear.
If k is as small as a constant, like 4, there is an even simpler linear algorithm: scan the array k times, each time removing the largest remaining element. This takes O(k * n) time and, because k is constant, O(k * n) = O(n).
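As a rough sketch of that last idea, something like the following should work (my own illustration in JavaScript, since the question is tagged JavaScript):
function kthHighest(numbers, k) {
  var copy = numbers.slice(); // don't mutate the caller's array
  var largest;
  for (var pass = 0; pass < k; pass++) {
    var maxIndex = 0;
    for (var i = 1; i < copy.length; i++) {
      if (copy[i] > copy[maxIndex]) maxIndex = i;
    }
    largest = copy[maxIndex];
    copy.splice(maxIndex, 1); // remove it so the next pass finds the next highest
  }
  return largest;
}
kthHighest([12, 56, 78, 34, 11, 100, 95], 4); // 56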
I tried to implement this with quickselect as JuniorCompressor suggested.
But I wonder if that is really the fastest possible way. I guess the calculation of the pivot could be made more efficient.
var nthLargest = function(list, n) {
var i, a = 0, b = list.length, m, pivot;
if(n < 1) throw new Error("n too small");
if(list.length < n) throw new Error("n too large");
list = list.slice(0);
var swap = function(list, a, b) {
var temp = list[a];
list[a] = list[b];
list[b] = temp;
}
//returns the index of the first element in the right sublist
var partition = function(list, pivot, a, b) {
b--;
while(a <= b) {
if(list[a] <= pivot) a++;
else if(list[b] > pivot) b--;
else swap(list, a, b);
}
return a;
}
while(b - a > 1) {
for(i = a, pivot = 0; i < b; i++) {
pivot += list[i];
}
pivot /= b-a;
m = partition(list, pivot, a, b);
if(b - m >= n) a = m; // select right sublist
else { // select left sublist
if(m === b) return list[a]; // all elements in sublist are identical
n -= b - m;
b = m;
}
}
if(n !== 1) throw new Error();
return list[a];
}
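For example, with the input from the question this should return the 4th highest number:
console.log(nthLargest([12, 56, 78, 34, 11, 100, 95], 4)); // 56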
<script>
function nthlargest(array, highest) {
  array.sort(function(a, b) { return a - b; }); // numeric sort; default sort compares as strings
  l = array.length;
  if (highest > l)
    return("undefined");
  else
    return(array[l - highest]); // nth highest counting from the end of the ascending sort
}
document.write(nthlargest([23, 652, 43, 89, 23, 90, 99, 88], 2));
</script>
Sorting is the simplest way I can think of.
But it appears you created your own sorting implementation.
Why not use the Array.sort function?
function nthHighest(numbers, n) {
var sorted = numbers.sort(function (a, b) {
return a - b;
});
return sorted[sorted.length - n];
}
You could simplify the arithmetic by doing a reverse sort, which just means b - a instead of a - b, then you don't need to pull from the back, which is just a cosmetic improvement.
function nthHighest(numbers, n) {
var sorted = numbers.sort(function (a, b) {
return b - a;
});
return sorted[n - 1];
}
You could also iterate over the array once, copying each element into a new array in sorted order and, again, taking the Nth-to-last element, using underscore's sortedIndex (a binary search) to find each insertion point.
function nthHighest(numbers, n) {
var sorted = [];
numbers.forEach(function (number) {
sorted.splice(_.sortedIndex(sorted, number), 0, number);
});
return sorted[numbers.length - n];
}
But this is basically a spin on the same concept: sort and take N. This approach would also perform better with a linked list than a pure array due to the restructuring, but that can be a separate exercise.
I came up with my own solution as it goes:
const nthlargest = (arr, n) => {
let newArr = [arr[0]];
for (let index = 1; index < arr.length; index++) {
const element = arr[index];
// push to end
if (element > newArr[index - 1]) {
newArr.push(element);
} else {
let insertPos = 0;
// if greater than first and less than last
if (newArr[0] < element && element < newArr[index - 1]) {
for (let j = 0; j < newArr.length; j++) {
if (newArr[j] > element) {
insertPos = j;
break; // insert before the first larger element
}
}
}
//insert at specified pos
newArr.splice(insertPos, 0, element);
}
}
return newArr[n];
}
console.log(nthlargest([43, 56, 23, 89, 88, 90, 99, 652], 4));
// counting from 0
// 89
This is without sorting the original array else it would be much easier.
Using the Array sort method:
function nthLargest(array, n){
array.sort(function(a, b) {
return b - a; // sorts the array in descending order
});
let i = n - 1; //i is the index of the nth largest number
console.log(array[i]);
}
def funcc(arr, n):
max=0
k=0
while (n>0):
n-=1
for i in arr:
if (i>max):
max=i
if(n>0):
arr.remove(max)
max=0
return max
a = [1,2,3,4,19,10,5,11,22,8]
k = funcc(a, 3)
print(k)
const data = [30, 8, 2, 350, 4, 63, 98];
let max = 0;
var nth = 5;
for (let i = 0; i < nth; i++) {
max = 0;
for (let j = 0; j < data.length; j++) {
if (data[j] > max) {
max = data[j];
}
}
var ind = data.indexOf(max);
console.log(data);
data.splice(ind, 1); // remove the current max so the next pass finds the next highest
}
console.log(nth + " largest number is ", max);
