I am working on a project involving weighted interpolation. Each station has a coordinate point on the map, as shown below.
var stationCoor = [[408,352],[525,348],[535,495],[420,400],[272,145],[175,195],[197,335]];
I take points located in the lake and use them to build weighted averages of the inputs from those stations. Here is my function for determining the weights.
function findWeightSpeed(xPos, yPos){
    var totalHypt = 0;
    var arrHpyt = [];
    var arrWeight = [];
    for(var l = 0; l < 7; l++){
        // Euclidean distance from (xPos, yPos) to station l
        var xDis = Math.abs(xPos - stationCoor[l][0]);
        var yDis = Math.abs(yPos - stationCoor[l][1]);
        var hptSq = Math.pow(xDis, 2) + Math.pow(yDis, 2);
        var hypt = Math.sqrt(hptSq);
        totalHypt = totalHypt + hypt;
        arrHpyt.push(hypt);
    }
    // normalise each distance by the total so the weights sum to 1
    for(var j = 0; j < 7; j++){
        arrWeight.push(arrHpyt[j] / totalHypt);
    }
    return arrWeight;
}
This finds the hypotenuse (straight-line distance) between the point (xPos, yPos) and each station. It then sums those distances and divides each station's distance by the total, yielding the weights.
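In other words, each station's weight is its distance divided by the sum of all seven distances, so the seven weights always add up to 1 (the sample weight array further down sums to exactly 1 as well).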
I need to use these weights to weight the wind direction from these stations. I was using the function below to calculate an average of the angles.
function averageAngles(){
    var x = 0;
    var y = 0;
    var pi = 22/7; // rough approximation of Math.PI
    var angle = [2.7925, 2.8797, 2.9670, 3.0543, 0.0872]; // 310,320,330,340,10
    for(var i = 0; i < angle.length; i++) {
        x += Math.cos(angle[i]);
        y += Math.sin(angle[i]);
    }
    var average_angle = Math.atan2(y, x);
    console.log((average_angle/pi)*360);
}
This gave me accurate results when each angle carried an equal weight of 0.20. However, the weights for the 7 stations (as seen below on the map) come out like [0.1076839005418769, 0.08051796093187284, 0.003987308213631277, 0.08458358029618485, 0.2463427297217639, 0.26463834002675196, 0.21224618026791833].
How would I go about writing a function that takes the weights from findWeightSpeed() and uses them to weight the circular quantities in averageAngles()?
I used this question, How do you calculate the average of a set of circular data?, to build the function for averaging angles.
Many thanks for any suggestions given.
Here is a link I found online that explains the entire procedure.
Computing Weighted Averages for Wind Speed and Direction
The code is similar to this.
function weightAllData(xPos, yPos, windData){
    var uVecSum = 0;
    var vVecSum = 0;
    var weightWDRad, weightWD;
    var arrayWeightSpeed = findWeightSpeed(xPos, yPos); // weighted interpolation based on distance
    var arrayWindSpeed = [WSData];     // wind speed reported by each station
    var arrayWindDirection = [WDData]; // wind direction reported by each station
    for(var m = 0; m < 7; m++){
        uVecSum = uVecSum + (arrayWeightSpeed[m] * getUVector(arrayWindSpeed[m], (arrayWindDirection[m]/180)*Math.PI));
        vVecSum = vVecSum + (arrayWeightSpeed[m] * getVVector(arrayWindSpeed[m], (arrayWindDirection[m]/180)*Math.PI));
    }
    var weightWS = Math.sqrt(Math.pow(uVecSum, 2) + Math.pow(vVecSum, 2));
    if(vVecSum != 0){
        weightWDRad = Math.atan(uVecSum/vVecSum);
    } else {
        // avoid dividing by zero when the v component vanishes
        weightWDRad = Math.atan(uVecSum/(0.0001 + vVecSum));
    }
    if(weightWDRad < 0){
        weightWDRad = weightWDRad + Math.PI;
    }
    weightWD = (weightWDRad * (180/Math.PI));
}
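The helpers getUVector and getVVector are not shown above. Here is a minimal sketch of what they might look like (my assumption, not part of the original answer), treating the direction as an angle in radians measured clockwise from north; if your directions follow the meteorological "blowing from" convention used in the linked procedure, negate both components.

// Hypothetical helpers: decompose a speed/direction pair into u (east-west)
// and v (north-south) components. Direction is assumed to be in radians,
// measured clockwise from north; negate both results if your directions
// describe where the wind blows from rather than toward.
function getUVector(speed, directionRad){
    return speed * Math.sin(directionRad);
}

function getVVector(speed, directionRad){
    return speed * Math.cos(directionRad);
}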
Let me know if you want an explanation.
A game has a large number of unit objects held in an array.
These units have x/y coordinates locating them on a 2d map.
let units = [
    {id: 1, x: 3450, y: 1456},
    {id: 2, x: 5560, y: 2423},
    {id: 3, x: 1321, y: 3451}
];
Approx 50 times a second the game requires each unit to generate a list of the other units within a given distance (to interact with them by fighting, avoiding, etc.).
As the unit count grows into the thousands, the current process, where each unit checks its distance against every other unit, slows down dramatically because the number of tests grows quadratically.
Looking into similar problems posted online, we started grouping the units into row/column cell collections and only performing the distance tests on those that 'might' be close enough to be relevant. However, we found that building and maintaining this grouping cost more time than it saved.
A testable version of the current code is below. On my fairly typical browser it takes about a second to complete, and that needs to improve substantially; suggestions for optimisations are welcome.
//create the world
let mapWidth = 5000;
let mapHeight = 2000;
let releventDistance = 200;
let unitCount = 5000;

//new unit function creates a unit in a random position on the map
function newUnit(id){
    let newUnit = {};
    newUnit.id = id;
    newUnit.x = Math.floor(Math.random()*mapWidth);
    newUnit.y = Math.floor(Math.random()*mapHeight);
    //this array of 'relevant' neighbours is the collection of other units close enough to interact with
    newUnit.neighbours = [];
    return newUnit;
}

//simple distance test
function distance (unit1, unit2){
    let dx = unit1.x - unit2.x;
    let dy = unit1.y - unit2.y;
    return Math.sqrt(dx * dx + dy * dy);
}

//collection of units
var myUnits = [];

//populate the units
for (let i = 0; i < unitCount; i++){
    myUnits.push(newUnit(i));
}
console.log(unitCount + " units created");

//complete a full scan with a nested forEach
let timeStamp1 = new Date();
myUnits.forEach(unit => {
    myUnits.forEach(unit2 => {
        //don't test a unit against itself
        if(unit.id != unit2.id){
            let unitDist = distance(unit, unit2);
            if (unitDist <= releventDistance){
                unit.neighbours.push({unit: unit2, distance: unitDist});
            }
        }
    })
})

//print results
console.log((new Date() - timeStamp1) + "ms: to complete bruteforce fullscan");

//print average number of neighbours
let totalNeighbourCount = 0;
myUnits.forEach(myUnit => {totalNeighbourCount += myUnit.neighbours.length});
console.log(Math.floor(totalNeighbourCount/myUnits.length) + ": average number of neighbours");
You could start the inner loop at the next index and avoid visiting pairs that have already been tested.
This approach requires adding the pair to both units' neighbour lists.
//create the world
let mapWidth = 5000;
let mapHeight = 2000;
let releventDistance = 200;
let unitCount = 5000;

//new unit function creates a unit in a random position on the map
function newUnit(id){
    let newUnit = {};
    newUnit.id = id;
    newUnit.x = Math.floor(Math.random()*mapWidth);
    newUnit.y = Math.floor(Math.random()*mapHeight);
    //this array of 'relevant' neighbours is the collection of other units close enough to interact with
    newUnit.neighbours = [];
    return newUnit;
}

//simple distance test
function distance (unit1, unit2){
    let dx = unit1.x - unit2.x;
    let dy = unit1.y - unit2.y;
    return Math.sqrt(dx * dx + dy * dy);
}

//collection of units
var myUnits = [];

//populate the units
for (let i = 0; i < unitCount; i++){
    myUnits.push(newUnit(i));
}
console.log(unitCount + " units created");

let timeStamp1 = new Date();
//test each pair only once, starting the inner loop at i + 1
for (let i = 0, l1 = myUnits.length - 1; i < l1; i++) {
    const unit = myUnits[i];
    for (let j = i + 1, l2 = myUnits.length; j < l2; j++) {
        const unit2 = myUnits[j];
        let unitDist = distance(unit, unit2);
        if (unitDist <= releventDistance) {
            //record the pair for both units
            unit2.neighbours.push({ unit: unit, distance: unitDist });
            unit.neighbours.push({ unit: unit2, distance: unitDist });
        }
    }
}

//print results
console.log((new Date() - timeStamp1) + "ms: to complete bruteforce fullscan");

//print average number of neighbours
let totalNeighbourCount = 0;
myUnits.forEach(myUnit => {totalNeighbourCount += myUnit.neighbours.length});
console.log(Math.floor(totalNeighbourCount/myUnits.length) + ": average number of neighbours");
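Because each pair is now tested once and the result is recorded for both units, the number of distance() calls drops from n(n-1) to n(n-1)/2, which roughly halves the time of the full scan.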
This JavaScript code represents the concept of a simple perceptron in a neural network. The code below predicts correctly for every truth table except the XOR table. Please run this code in your browser's console window and find out what is wrong.
Since this is a simple single neuron, I didn't give much importance to hidden layers. I am training it for up to 10,000 iterations for a better result.
//AND GATE
var X1 = [0,1,1,0];
var X2 = [0,1,0,1];
var OUT = [0,1,0,0];
/*
//AND GATE
var X1 = [0,1,1,0];
var X2 = [0,1,0,1];
var OUT = [0,1,0,0];
//OR GATE
var X1 = [0,1,1,0];
var X2 = [0,1,0,1];
var OUT = [0,1,1,1];
//NAND GATE
var X1 = [0,1,1,0];
var X2 = [0,1,0,1];
var OUT = [1,0,1,1];
//NOR GATE
var X1 = [0,1,1,0];
var X2 = [0,1,0,1];
var OUT = [1,0,0,0];
//XOR GATE
var X1 = [0,1,1,0];
var X2 = [0,1,0,1];
var OUT = [0,0,1,1];
*/
var LR = 0.01; //Learning rate to speed up the learning process.
var BIAS = 1; //Avoids the sum becoming zero.
var TRAIN = 10000; //Epochs we need to run for an accurate result
var WEIGHTS = [Math.random(),Math.random(),Math.random()]; //3 random weights: 2 for inputs & 1 for bias
//console.log("Initial Weights : "+WEIGHTS);
function neuron(x1,x2,out){
    var sum = 0;
    var error = 0;
    //Sum of weighted x1,x2 and bias
    sum = x1*WEIGHTS[0] + x2*WEIGHTS[1] + BIAS*WEIGHTS[2];
    //Heaviside step function as activation function
    if(sum > 1){
        sum = 1;
    }else{
        sum = 0;
    }
    //Calculate the error
    error = out - sum;
    //Adjust weights
    WEIGHTS[0] = WEIGHTS[0] + error * x1 * LR;
    WEIGHTS[1] = WEIGHTS[1] + error * x2 * LR;
    WEIGHTS[2] = WEIGHTS[2] + error * BIAS * LR;
    //console.log("Weights adjust : "+WEIGHTS);
}
function Train(){
    //Epoch iteration, e.g. 10000 is good
    for(var k = 1; k <= TRAIN; k++){
        //Train the four rows of the truth table
        for(var i = 0; i < X1.length; i++){
            neuron(X1[i], X2[i], OUT[i]);
        }
    }
}
function Predict(x1,x2){
    var predict = 0;
    predict = x1*WEIGHTS[0] + x2*WEIGHTS[1] + BIAS*WEIGHTS[2];
    if(predict > 1){
        predict = 1;
    }else{
        predict = 0;
    }
    //Predict for the given input
    console.log("The prediction for "+(x1+","+x2)+" is "+predict);
}
//First train the perceptron
Train();
//Predict for given input
Predict(1,1);
Predict(0,0);
Predict(1,0);
Predict(0,1);
The output for the XOR gate is:
The prediction for 1,1 is 1
The prediction for 0,0 is 1
The prediction for 1,0 is 1
The prediction for 0,1 is 1
Some sources state that it is not possible to solve the XOR gate with a single perceptron.
Other sources explain that you need a higher-order perceptron to solve XOR with a single unit.
I quote from the second link:
Everyone who has ever studied about neural networks has probably
already read that a single perceptron can’t represent the boolean XOR
function. The book Artificial Intelligence: A Modern Approach, the
leading textbook in AI, says: “[XOR] is not linearly separable so the
perceptron cannot learn it” (p.730).
I hope this points you in the right direction. This kind of question is not new here.
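To illustrate the "higher-order perceptron" idea mentioned above, here is a minimal sketch (my own, not taken from either source) that keeps a single neuron but feeds the product x1*x2 in as a third input, which makes XOR linearly separable:

// Sketch only: a single neuron with inputs x1, x2 and the extra product term x1*x2.
// With this added feature XOR becomes linearly separable, e.g.
// step(x1 + x2 - 2*x1*x2 - 0.5) reproduces the XOR truth table exactly.
var W = [Math.random(), Math.random(), Math.random(), Math.random()]; // x1, x2, x1*x2, bias

function neuronXor(x1, x2, out){
    var sum = x1*W[0] + x2*W[1] + (x1*x2)*W[2] + 1*W[3];
    var y = sum > 0 ? 1 : 0;        // threshold at 0
    var error = out - y;
    W[0] += error * x1 * 0.01;
    W[1] += error * x2 * 0.01;
    W[2] += error * (x1*x2) * 0.01;
    W[3] += error * 1 * 0.01;
}

// Train on the XOR table from the question, then predict
var XX1 = [0,1,1,0], XX2 = [0,1,0,1], XOUT = [0,0,1,1];
for (var k = 0; k < 10000; k++){
    for (var i = 0; i < XX1.length; i++){
        neuronXor(XX1[i], XX2[i], XOUT[i]);
    }
}
function predictXor(x1, x2){
    var sum = x1*W[0] + x2*W[1] + (x1*x2)*W[2] + 1*W[3];
    console.log("XOR prediction for " + x1 + "," + x2 + " is " + (sum > 0 ? 1 : 0));
}
predictXor(0,0); predictXor(0,1); predictXor(1,0); predictXor(1,1);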
The problem
I have an array, a property of the class road, which defines the allowed vehicle types on the road and gives the approximate proportion each type contributes to traffic:
[[bus,.2],[smallCar,.6],[bigCar,.2]]
I need to calculate the average length of the cars encountered on the road, given the proportion of traffic. (This is needed to do some basic calculations elsewhere in the class.)
What I have tried:
I can't quite wrap my head around a better way to do this. In my solution, fidelity changes with the car count. It seems a really slow, heavy-handed approach and not right. The function to improve is named averageVehicleLaneSpace.
A very pared down but working version of the much larger classes:
var road = function(){};

road.prototype.averageVehicleLaneSpace = function(){
    var sum = 0;
    var fakeCarCount = 10000;
    for(var i = 0; i < this.allowedVehicleTypes.length; i++){
        var type = this.allowedVehicleTypes[i][0];
        var perc = this.allowedVehicleTypes[i][1];
        //simulate perc * fakeCarCount vehicles of this type
        for(var n = 0; n <= fakeCarCount*perc; n++){
            sum += vehicle[type].laneSpace;
        }
    }
    return sum/fakeCarCount;
}
//define vehicles
var vehicle = {
    bus: {
        laneSpace: 14
    },
    smallCar: {
        laneSpace: 4
    },
    bigCar: {
        laneSpace: 4.5
    }
};
var t = new road();
t.allowedVehicleTypes = [["bus",.1],["smallCar",.3],["bigCar",.6]];
alert(t.averageVehicleLaneSpace());
The Fiddle:
The example above, hopefully working, is here: The fiddle.
The average is the sum of the ratio * laneSpace for each vehicle type. So calculating the average is as simple as:
road.prototype.averageVehicleLaneSpace = function(){
    var avg = 0;
    for (var i = 0; i < this.allowedVehicleTypes.length; i++) {
        var type = this.allowedVehicleTypes[i][0];
        var perc = this.allowedVehicleTypes[i][1];
        avg += perc * vehicle[type].laneSpace;
    }
    return avg;
}
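With the sample data above ([["bus",.1],["smallCar",.3],["bigCar",.6]]) this returns 0.1 × 14 + 0.3 × 4 + 0.6 × 4.5 = 1.4 + 1.2 + 2.7 = 5.3, with no simulated car count involved.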
Say I have the array [1,2,5,18,17,8] and I want to turn it into an array of length 40 that follows the same path.
a = [1,2,5,18,17,8];
stepSize = 1 / (40 / a.length);
Then I think I could do something like
steps = [];
for( var i = 0; i < 1; i += stepSize) {
    steps.push(d3.interpolate(a[0], a[1])(i));
}
and then repeat that for all the elements. My question: is there a better way to do this?
I can only guess what your real problem is, but I think you want to plot these values and have a smooth curve. In that case use line.interpolate(): https://github.com/mbostock/d3/wiki/SVG-Shapes#line_interpolate
In case you DO know what you need and your solution works for you, take this tip:
Never iterate over stepSize. Calculate it once and multiply it by i in a loop where i goes from 0 to 40. This way you work around floating-point precision problems.
Your algorithm cleaned up, tested and working:
var a = [1,5,12,76,1,2];
var steps = 24;
var ss = (a.length-1) / (steps-1);
var result = new Array(steps);
for (var i = 0; i < steps; i++) {
    var progress = ss * i;
    var left = Math.floor(progress);
    var right = Math.ceil(progress);
    var factor = progress - left;
    result[i] = (1 - factor) * a[left] + factor * a[right];
    // alternative that works the same way:
    //result[i] = d3.interpolateNumber(a[left], a[right])(factor);
}
console.log(result);
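Note that progress is recomputed as ss * i on every pass rather than accumulated, which is exactly the tip above: repeatedly adding a floating-point step would let rounding error build up, while multiplying keeps each value as accurate as possible.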
I am working on an SVG-based application where the user draws a path, and I convert that path to points using the following function:
path2Array: function(path, incrementer) {
    var pointsArray = new Array();
    var add = 1;
    var point, tx, ty, cordinatesXY;
    if (incrementer)
        add = incrementer;
    var length = Math.ceil(path.getTotalLength());
    for (var i = 0; i <= length; i += add) {
        point = path.getPointAtLength(i);
        tx = Paper.roundDecimalNumber(point.x); // custom rounding function
        ty = Paper.roundDecimalNumber(point.y);
        cordinatesXY = {
            x: tx,
            y: ty
        };
        pointsArray.push(cordinatesXY);
    }
    return pointsArray;
}
Now I want to get points at equal intervals.
For example, say that for an SVG path my function returns an array with
Total_nodes=200;
Now I want 5 nodes at equal distances, so I divide 200/5=40;
for(var i = 0; i < points.length; i += 40)
{
    var point = points[i];
    // do something
}
and I loop over the points array with an increment of 40 to get 5 points at equal intervals.
What didn't work: when there are 212 nodes, 212/5=42.4 is not an integer index. So how can I get points at equal intervals when the computed index is a decimal?
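One way to handle this (a sketch of my own, assuming the same getTotalLength()/getPointAtLength() API used in path2Array above) is to sample the path directly at equal fractions of its total length instead of stepping through the pre-built points array, so no integer index is needed:

// Sketch: return `count` points spaced at equal arc-length intervals along `path`.
// Assumes `path` supports getTotalLength()/getPointAtLength() like an SVGPathElement.
function equalIntervalPoints(path, count) {
    var totalLength = path.getTotalLength();
    var result = [];
    for (var i = 0; i < count; i++) {
        // fraction runs from 0 to 1 inclusive, so the first and last points
        // sit at the start and end of the path
        var fraction = (count === 1) ? 0 : i / (count - 1);
        var point = path.getPointAtLength(totalLength * fraction);
        result.push({ x: point.x, y: point.y });
    }
    return result;
}

// e.g. equalIntervalPoints(path, 5) gives 5 evenly spaced points regardless of
// whether the sampled array would have had 200 or 212 entries.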