I'm trying to create a virtual multiplayer platform for live concerts using three.js and socket.io.
Everything works fine in single-player, but when I test whether the website can handle a big load of players by creating multiple NPCs at the same time, it all falls apart and gives me these errors:
THREE.WebGLProgram: shader error: 0 35715 false gl.getProgramInfoLog Varyings over maximum register limit
THREE.WebGLProgram: shader error: 1282 35715 false gl.getProgramInfoLog Varyings over maximum register limit
The error comes up when I create more than 8 NPCs using this for loop:
for (let i = 0; i < 50; i++) {
  var x, y, z;
  x = Math.random() * 2000;
  x *= Math.round(Math.random()) ? 1 : -1;
  z = Math.random() * (19000 - 8000) + 8000;
  let smurf = new Npc(this, this.options, "Groupie", x, -190, -z, false, "Fan");
  this.npcs.push(smurf);
  this.remoteNPCsColliders.push(smurf);
}
From what I've understood from the three.js documentation, the issue is that too many varyings are being passed at once; but varyings seem to be additional data passed between shaders, which I don't use anywhere in my code. This is the NPC class:
class Npc {
constructor(game, options, identifier, x, y, z, conv, type) {
let model, colour;
const colours = ['Black', 'Brown', 'White'];
colour = colours[Math.floor(Math.random() * colours.length)];
if (options === undefined) {
const people = ['BeachBabe', 'BusinessMan', 'Doctor', 'FireFighter', 'Housewife', 'Policeman', 'Prostitute', 'Punk', 'RiotCop', 'Roadworker', 'Robber', 'Sheriff', 'Streetman', 'Waitress'];
model = people[Math.floor(Math.random() * people.length)];
}
this.model = model;
this.colour = colour;
this.game = game;
this.animations = this.game.animations;
const geometry = new THREE.BoxGeometry(100, 300, 100);
const material = new THREE.MeshBasicMaterial({
visible: false
});
const box = new THREE.Mesh(geometry, material);
box.name = "Collider";
box.position.set(0, 150, 0);
this.collider = box;
const loader = new FBXLoader();
const npc = this;
loader.load(`${game.assetsPath}fbx/people/${type}.fbx`, function(object) {
object.mixer = new THREE.AnimationMixer(object);
npc.root = object;
npc.mixer = object.mixer;
object.name = identifier;
object.conv = conv;
object.traverse(function(child) {
if (child.isMesh) {
child.castShadow = true;
child.receiveShadow = true;
}
});
const textureLoader = new THREE.TextureLoader();
npc.object = new THREE.Object3D();
npc.object.position.set(x, y, z);
//npc.object.position.set(0, 0, 0);
npc.object.rotation.set(0, -Math.PI / 2, 0);
npc.object.scale.set(2.3, 2.3, 2.3);
npc.object.add(object);
if (npc.deleted === undefined) game.scene.add(npc.object);
npc.object.add(box);
//if (game.animations.Idle!==undefined) npc.action = "Happy";
npc.action = object.animations[0]
});
}
set action(name){
//Make a copy of the clip if this is a remote player
if (this.actionName == name) return;
const clip = this.animations[name];
const action = this.mixer.clipAction( name );
action.time = 0;
this.mixer.stopAllAction();
action.startAt(0.03333);
this.actionName = name;
this.actionTime = Date.now();
action.fadeIn(0.5);
action.play();
}
get action(){
return this.actionName;
}
}
I've tried commenting out every bit of code in here, with no success. The only thing that fixed the error was disabling the renderer's shadow map, but for some reason that overexposed the whole map and made everything unplayable.
The only other questions on the topic that I could find blame the lights, which could be an explanation, since disabling the shadow map fixed the error, but I've disabled every single light and the error still occurs.
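For what it's worth, the GPU's varying limit can be queried straight from the WebGL context; a quick sanity check like this (a minimal sketch, assuming renderer is the WebGLRenderer) at least shows what limit the shaders are hitting:
const gl = renderer.getContext();
// each shadow-casting light adds extra varyings to the built-in materials,
// which is presumably why disabling the shadow map made the error disappear
console.log('MAX_VARYING_VECTORS:', gl.getParameter(gl.MAX_VARYING_VECTORS));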
I'm literally going crazy over this; I hope someone can help me!
I'm trying to implement instancing in A-Frame using the three.js InstancedMesh, based on the example here: https://github.com/mrdoob/three.js/blob/master/examples/webgl_instancing_dynamic.html
Relevant section of code here:
init: function() {
const {count, radius, scale, colors, positions} = this.data;
this.start = true;
this.dummy = new THREE.Object3D();
this.count = count;
this.startObject = new THREE.Object3D();
this.endObject = new THREE.Object3D();
this.instanceColors = new Float32Array(count * 3);
this.instanceColorsBase = new Float32Array(this.instanceColors.length);
this.vertices = [];
this.rotations = [];
for ( var i = 0; i < this.data.count; i ++ ) {
var x = this.data.positions[i][0] * this.data.scale;
var y = this.data.positions[i][1] * this.data.scale;
var z = this.data.positions[i][2] * this.data.scale;
var xEnd = x + this.data.endPositions[i][0] * this.data.scale;
var yEnd = y + this.data.endPositions[i][1] * this.data.scale;
var zEnd = z + this.data.endPositions[i][2] * this.data.scale;
this.vertices.push( x, y, z );
const rotation = this.getDirection({'x':x,'y':y,'z':z},
{'x':xEnd,'y':yEnd,'z':zEnd});
this.rotations.push(rotation.x, rotation.y, rotation.z);
}
let mesh;
let geometry;
let material;
const loader = new THREE.GLTFLoader();
const el = this.el;
loader.load("/assets/arrow/arrow.gltf", function ( model ) {
geometry = model.scene.children[0].children[0].geometry;
geometry.computeVertexNormals();
geometry.scale( 0.03, 0.03, 0.03 );
material = new THREE.MeshNormalMaterial();
mesh = new THREE.InstancedMesh( geometry, material, count );
mesh.instanceMatrix.setUsage( THREE.DynamicDrawUsage );
el.object3D.add(mesh);
} );
this.el.setAttribute("id", "cells");
},
setMatrix: function (start) {
if (this.mesh) {
for ( let i = 0; i < this.count; i ++ ) {
var x = this.data.positions[i][0] * this.data.scale;
var y = this.data.positions[i][1] * this.data.scale;
var z = this.data.positions[i][2] * this.data.scale;
var xEnd = x + this.data.endPositions[i][0] * this.data.scale;
var yEnd = y + this.data.endPositions[i][1] * this.data.scale;
var zEnd = z + this.data.endPositions[i][2] * this.data.scale;
if (start) {
this.dummy.position.set(xEnd, yEnd, zEnd);
} else {
this.dummy.position.set(x, y, z);
}
this.dummy.rotation.x = this.rotations[i][0];
this.dummy.rotation.y = this.rotations[i][1];
this.dummy.rotation.z = this.rotations[i][2];
this.dummy.updateMatrix();
this.mesh.setMatrixAt( i, this.dummy.matrix );
}
this.mesh.instanceMatrix.needsUpdate = true;
}
},
tick: function() {
this.setMatrix(this.start);
this.start = !this.start;
},
No errors or relevant messages that I can see, but none of the instanced objects are rendering. Unfortunately I don't really have a good way to post an example. Does anyone know what I'm doing wrong? Thanks in advance!
Note: it seems the objects are being rendered, because the number of triangles drawn increases drastically when I add this component. However, they are not visible anywhere, and I can't find them in the A-Frame inspector either.
It's a very case-specific question on quite an extensive topic, so:
In general, using THREE.InstancedMesh is simple, and you've got it right:
// create an instanced mesh
let iMesh = new THREE.InstancedMesh(geometry, material, count);
element.object3D.add(iMesh);

// manipulate the instances
let mtx = new THREE.Matrix4();
// set the position, rotation, scale of the matrix
// ...

// update the instance
iMesh.setMatrixAt(index, mtx);
iMesh.instanceMatrix.needsUpdate = true;
Example of an instanced gltf model here
Your code is doing a lot, and it would be easier if it could be stripped down to a bare minimum. Yet I think there is only one major issue: this.mesh is never assigned (the loader callback only sets the local mesh variable), so the if (this.mesh) check in setMatrix fails and the function does nothing. Other than that, you may need to disable frustum culling (mesh.frustumCulled = false) or set a proper bounding sphere, otherwise the instances may disappear when the base object is out of sight.
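A minimal sketch of both fixes, applied inside the loader callback from init() (since the callback is a plain function, it needs a captured reference to the component, here a self variable; geometry, material, count and el are assumed to be set up as in the question):
const self = this;
loader.load("/assets/arrow/arrow.gltf", function (model) {
  // ...build geometry and material as in the question...
  self.mesh = new THREE.InstancedMesh(geometry, material, count);
  self.mesh.instanceMatrix.setUsage(THREE.DynamicDrawUsage);
  self.mesh.frustumCulled = false; // or give the geometry a valid bounding sphere
  el.object3D.add(self.mesh);
});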
Once that's set, your code seems to work.
I would like to figure out how to map the controls for my Oculus Quest and other devices, using three.js and WebXR. The code works: it allows me to move the controllers, maps a cylinder to each controller, and lets me use the trigger to change the color of the cylinders. That's great, but I can't find any documentation on how to use the axis controls of the joystick, the grip, and the other buttons. Part of me wants to believe it's as simple as knowing which event to listen for, but I don't know what other events are available.
Here is a link to the tutorial I based this off of. https://github.com/as-ideas/webvr-with-threejs
Please note that this code works as expected; I just don't know how to take it further and do more.
function createController(controllerID, videoinput) {
//RENDER CONTROLLER AS YELLOW TUBE
const controller = renderer.vr.getController(controllerID);
const cylinderGeometry = new CylinderGeometry(0.025, 0.025, 1, 32);
const cylinderMaterial = new MeshPhongMaterial({ color: 0xffff00 });
const cylinder = new Mesh(cylinderGeometry, cylinderMaterial);
cylinder.geometry.translate(0, 0.5, 0);
cylinder.rotateX(-0.25 * Math.PI);
controller.add(cylinder);
cameraFixture.add(controller);
//TRIGGER
controller.addEventListener('selectstart', () => {
if (controllerID === 0) {
cylinderMaterial.color.set('pink')
} else {
cylinderMaterial.color.set('orange');
videoinput.play()
}
});
controller.addEventListener('selectend', () => {
cylinderMaterial.color.set(0xffff00);
videoinput.pause();
console.log('I pressed play');
});
}
As of three.js 0.119, integrated 'events' for the other buttons, trackpads, haptics, and thumbsticks of a touch controller are not provided; only select and squeeze events are available. three.js has a functional model of 'just working' regardless of what type of input device you have, so it only manages events that every input device can produce (i.e. select).
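(For reference, the squeeze events are wired up exactly like the select events in the question's createController function; a minimal sketch, assuming a controller obtained from renderer.xr.getController() and the cylinderMaterial from the question:)
controller.addEventListener('squeezestart', () => {
  cylinderMaterial.color.set('green'); // grip squeezed
});
controller.addEventListener('squeezeend', () => {
  cylinderMaterial.color.set(0xffff00); // back to yellow
});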
Luckily, we are not limited by what three.js has made available and can just poll the controller data directly.
Touch controllers follow the 'gamepad' model and just report their instantaneous values. We poll the gamepad for the current values of its various buttons and axes, keep track of their state from frame to frame, and create 'events' within our own code for button pushes and for trackpad and thumbstick axis changes.
To access the instantaneous data from a touch controller while within a WebXR session:
const session = renderer.xr.getSession();
let i = 0;
if (session) {
  for (const source of session.inputSources) {
    if (source && source.handedness) {
      handedness = source.handedness; // left or right controllers
    }
    if (!source.gamepad) continue;
    const controller = renderer.xr.getController(i++);
    const old = prevGamePads.get(source);
    const data = {
      handedness: handedness,
      buttons: source.gamepad.buttons.map((b) => b.value),
      axes: source.gamepad.axes.slice(0)
    };
    // process data accordingly to create 'events'
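    // (sketch) compare with last frame's values to synthesize our own events,
    // e.g. a button press when a value crosses to fully pressed
    if (old) {
      data.buttons.forEach((value, idx) => {
        if (value === 1 && old.buttons[idx] !== 1) {
          console.log(`button ${idx} pressed on the ${data.handedness} controller`);
        }
      });
    }
    prevGamePads.set(source, data); // remember this frame for the next comparison
  }
}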
Haptic feedback is provided through a promise. (Note: not all browsers currently support WebXR haptic feedback, but the Oculus Browser and Firefox Reality on the Quest do.)
When available, the haptic pulse is triggered like this:
var didPulse = sourceXR.gamepad.hapticActuators[0].pulse(0.8, 100);
//80% intensity for 100ms
//subsequent promises cancel any previous promise still underway
To demonstrate this solution I have modified the threejs.org/examples/#webXR_vr_dragging example by adding the camera to a 'dolly' that can be moved around with the touch controllers' thumbsticks while in a WebXR session, with various haptic feedback for events such as raycasting onto an object or thumbstick axis movement.
Each frame we poll the data from the touch controllers and respond accordingly. We have to store the data from frame to frame to detect changes and create our events, and to filter out some data (false 0's, and up to 20% random drift from 0 in the thumbstick axis values on some controllers). For proper 'forward and sideways' dolly movement we also need the current heading and attitude of the WebXR camera each frame, accessed via:
let xrCamera = renderer.xr.getCamera(camera);
xrCamera.getWorldDirection(cameraVector);
//heading vector for webXR camera now within cameraVector
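With that heading vector, 'forward' movement along a thumbstick's vertical axis is just a scaled subtraction; a condensed sketch of what the full dollyMove() below does (data.axes[3] is the thumbstick's up/down axis here):
const speed = 0.1; // per-frame movement factor (accelerated over time in the full code)
dolly.position.x -= cameraVector.x * speed * data.axes[3];
dolly.position.z -= cameraVector.z * speed * data.axes[3];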
Example codepen here:
codepen.io/jason-buchheim/pen/zYqYGXM
With the 'ENTER VR' button exposed (debug view) here: cdpn.io/jason-buchheim/debug/zYqYGXM
Full code, with the modifications from the original three.js example highlighted with comment blocks:
//// From webxr_vr_dragging example https://threejs.org/examples/#webxr_vr_dragging
import * as THREE from "https://cdn.jsdelivr.net/npm/three@0.119.1/build/three.module.min.js";
import { OrbitControls } from "https://cdn.jsdelivr.net/npm/three@0.119.1/examples/jsm/controls/OrbitControls.min.js";
import { VRButton } from "https://cdn.jsdelivr.net/npm/three@0.119.1/examples/jsm/webxr/VRButton.min.js";
import { XRControllerModelFactory } from "https://cdn.jsdelivr.net/npm/three@0.119.1/examples/jsm/webxr/XRControllerModelFactory.min.js";
var container;
var camera, scene, renderer;
var controller1, controller2;
var controllerGrip1, controllerGrip2;
var raycaster,
intersected = [];
var tempMatrix = new THREE.Matrix4();
var controls, group;
////////////////////////////////////////
//// MODIFICATIONS FROM THREEJS EXAMPLE
//// a camera dolly to move camera within webXR
//// a vector to reuse each frame to store webXR camera heading
//// a variable to store previous frames polling of gamepads
//// a variable to store accumulated accelerations along axis with continuous movement
var dolly;
var cameraVector = new THREE.Vector3(); // create once and reuse it!
const prevGamePads = new Map();
var speedFactor = [0.1, 0.1, 0.1, 0.1];
////
//////////////////////////////////////////
init();
animate();
function init() {
container = document.createElement("div");
document.body.appendChild(container);
scene = new THREE.Scene();
scene.background = new THREE.Color(0x808080);
camera = new THREE.PerspectiveCamera(
50,
window.innerWidth / window.innerHeight,
0.1,
500 //MODIFIED FOR LARGER SCENE
);
camera.position.set(0, 1.6, 3);
controls = new OrbitControls(camera, container);
controls.target.set(0, 1.6, 0);
controls.update();
var geometry = new THREE.PlaneBufferGeometry(100, 100);
var material = new THREE.MeshStandardMaterial({
color: 0xeeeeee,
roughness: 1.0,
metalness: 0.0
});
var floor = new THREE.Mesh(geometry, material);
floor.rotation.x = -Math.PI / 2;
floor.receiveShadow = true;
scene.add(floor);
scene.add(new THREE.HemisphereLight(0x808080, 0x606060));
var light = new THREE.DirectionalLight(0xffffff);
light.position.set(0, 200, 0); // MODIFIED SIZE OF SCENE AND SHADOW
light.castShadow = true;
light.shadow.camera.top = 200; // MODIFIED FOR LARGER SCENE
light.shadow.camera.bottom = -200; // MODIFIED FOR LARGER SCENE
light.shadow.camera.right = 200; // MODIFIED FOR LARGER SCENE
light.shadow.camera.left = -200; // MODIFIED FOR LARGER SCENE
light.shadow.mapSize.set(4096, 4096);
scene.add(light);
group = new THREE.Group();
scene.add(group);
var geometries = [
new THREE.BoxBufferGeometry(0.2, 0.2, 0.2),
new THREE.ConeBufferGeometry(0.2, 0.2, 64),
new THREE.CylinderBufferGeometry(0.2, 0.2, 0.2, 64),
new THREE.IcosahedronBufferGeometry(0.2, 3),
new THREE.TorusBufferGeometry(0.2, 0.04, 64, 32)
];
for (var i = 0; i < 100; i++) {
var geometry = geometries[Math.floor(Math.random() * geometries.length)];
var material = new THREE.MeshStandardMaterial({
color: Math.random() * 0xffffff,
roughness: 0.7,
side: THREE.DoubleSide, // MODIFIED TO DoubleSide
metalness: 0.0
});
var object = new THREE.Mesh(geometry, material);
object.position.x = Math.random() * 200 - 100; // MODIFIED FOR LARGER SCENE
object.position.y = Math.random() * 100; // MODIFIED FOR LARGER SCENE
object.position.z = Math.random() * 200 - 100; // MODIFIED FOR LARGER SCENE
object.rotation.x = Math.random() * 2 * Math.PI;
object.rotation.y = Math.random() * 2 * Math.PI;
object.rotation.z = Math.random() * 2 * Math.PI;
object.scale.setScalar(Math.random() * 20 + 0.5); // MODIFIED FOR LARGER SCENE
object.castShadow = true;
object.receiveShadow = true;
group.add(object);
}
// renderer
renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.outputEncoding = THREE.sRGBEncoding;
renderer.shadowMap.enabled = true;
renderer.xr.enabled = true;
//the following increases the resolution on Quest
renderer.xr.setFramebufferScaleFactor(2.0);
container.appendChild(renderer.domElement);
document.body.appendChild(VRButton.createButton(renderer));
// controllers
controller1 = renderer.xr.getController(0);
controller1.name="left"; ////MODIFIED, added .name="left"
controller1.addEventListener("selectstart", onSelectStart);
controller1.addEventListener("selectend", onSelectEnd);
scene.add(controller1);
controller2 = renderer.xr.getController(1);
controller2.name="right"; ////MODIFIED added .name="right"
controller2.addEventListener("selectstart", onSelectStart);
controller2.addEventListener("selectend", onSelectEnd);
scene.add(controller2);
var controllerModelFactory = new XRControllerModelFactory();
controllerGrip1 = renderer.xr.getControllerGrip(0);
controllerGrip1.add(
controllerModelFactory.createControllerModel(controllerGrip1)
);
scene.add(controllerGrip1);
controllerGrip2 = renderer.xr.getControllerGrip(1);
controllerGrip2.add(
controllerModelFactory.createControllerModel(controllerGrip2)
);
scene.add(controllerGrip2);
//Raycaster Geometry
var geometry = new THREE.BufferGeometry().setFromPoints([
new THREE.Vector3(0, 0, 0),
new THREE.Vector3(0, 0, -1)
]);
var line = new THREE.Line(geometry);
line.name = "line";
line.scale.z = 50; //MODIFIED FOR LARGER SCENE
controller1.add(line.clone());
controller2.add(line.clone());
raycaster = new THREE.Raycaster();
////////////////////////////////////////
//// MODIFICATIONS FROM THREEJS EXAMPLE
//// create group named 'dolly' and add camera and controllers to it
//// will move dolly to move camera and controllers in webXR
dolly = new THREE.Group();
dolly.position.set(0, 0, 0);
dolly.name = "dolly";
scene.add(dolly);
dolly.add(camera);
dolly.add(controller1);
dolly.add(controller2);
dolly.add(controllerGrip1);
dolly.add(controllerGrip2);
////
///////////////////////////////////
window.addEventListener("resize", onWindowResize, false);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
function onSelectStart(event) {
var controller = event.target;
var intersections = getIntersections(controller);
if (intersections.length > 0) {
var intersection = intersections[0];
var object = intersection.object;
object.material.emissive.b = 1;
controller.attach(object);
controller.userData.selected = object;
}
}
function onSelectEnd(event) {
var controller = event.target;
if (controller.userData.selected !== undefined) {
var object = controller.userData.selected;
object.material.emissive.b = 0;
group.attach(object);
controller.userData.selected = undefined;
}
}
function getIntersections(controller) {
tempMatrix.identity().extractRotation(controller.matrixWorld);
raycaster.ray.origin.setFromMatrixPosition(controller.matrixWorld);
raycaster.ray.direction.set(0, 0, -1).applyMatrix4(tempMatrix);
return raycaster.intersectObjects(group.children);
}
function intersectObjects(controller) {
// Do not highlight when already selected
if (controller.userData.selected !== undefined) return;
var line = controller.getObjectByName("line");
var intersections = getIntersections(controller);
if (intersections.length > 0) {
var intersection = intersections[0];
////////////////////////////////////////
//// MODIFICATIONS FROM THREEJS EXAMPLE
//// check if in webXR session
//// if so, provide haptic feedback to the controller that raycasted onto object
//// (only if haptic actuator is available)
const session = renderer.xr.getSession();
if (session) { //only if we are in a webXR session
for (const sourceXR of session.inputSources) {
if (!sourceXR.gamepad) continue;
if (
sourceXR &&
sourceXR.gamepad &&
sourceXR.gamepad.hapticActuators &&
sourceXR.gamepad.hapticActuators[0] &&
sourceXR.handedness == controller.name
) {
var didPulse = sourceXR.gamepad.hapticActuators[0].pulse(0.8, 100);
}
}
}
////
////////////////////////////////
var object = intersection.object;
object.material.emissive.r = 1;
intersected.push(object);
line.scale.z = intersection.distance;
} else {
line.scale.z = 50; //MODIFIED AS OUR SCENE IS LARGER
}
}
function cleanIntersected() {
while (intersected.length) {
var object = intersected.pop();
object.material.emissive.r = 0;
}
}
function animate() {
renderer.setAnimationLoop(render);
}
function render() {
cleanIntersected();
intersectObjects(controller1);
intersectObjects(controller2);
////////////////////////////////////////
//// MODIFICATIONS FROM THREEJS EXAMPLE
//add gamepad polling for webxr to renderloop
dollyMove();
////
//////////////////////////////////////
renderer.render(scene, camera);
}
////////////////////////////////////////
//// MODIFICATIONS FROM THREEJS EXAMPLE
//// New dollyMove() function
//// this function polls gamepad and keeps track of its state changes to create 'events'
function dollyMove() {
var handedness = "unknown";
//determine if we are in an xr session
const session = renderer.xr.getSession();
let i = 0;
if (session) {
let xrCamera = renderer.xr.getCamera(camera);
xrCamera.getWorldDirection(cameraVector);
//a check to prevent console errors if only one input source
if (isIterable(session.inputSources)) {
for (const source of session.inputSources) {
if (source && source.handedness) {
handedness = source.handedness; //left or right controllers
}
if (!source.gamepad) continue;
const controller = renderer.xr.getController(i++);
const old = prevGamePads.get(source);
const data = {
handedness: handedness,
buttons: source.gamepad.buttons.map((b) => b.value),
axes: source.gamepad.axes.slice(0)
};
if (old) {
data.buttons.forEach((value, i) => {
//handlers for buttons
if (value !== old.buttons[i] || Math.abs(value) > 0.8) {
//check if it is 'all the way pushed'
if (value === 1) {
//console.log("Button" + i + "Down");
if (data.handedness == "left") {
//console.log("Left Paddle Down");
if (i == 1) {
dolly.rotateY(-THREE.Math.degToRad(1));
}
if (i == 3) {
//reset teleport to home position
dolly.position.x = 0;
dolly.position.y = 5;
dolly.position.z = 0;
}
} else {
//console.log("Right Paddle Down");
if (i == 1) {
dolly.rotateY(THREE.Math.degToRad(1));
}
}
} else {
// console.log("Button" + i + "Up");
if (i == 1) {
//use the paddle buttons to rotate
if (data.handedness == "left") {
//console.log("Left Paddle Down");
dolly.rotateY(-THREE.Math.degToRad(Math.abs(value)));
} else {
//console.log("Right Paddle Down");
dolly.rotateY(THREE.Math.degToRad(Math.abs(value)));
}
}
}
}
});
data.axes.forEach((value, i) => {
//handlers for thumbsticks
//if thumbstick axis has moved beyond the minimum threshold from center, windows mixed reality seems to wander up to about .17 with no input
if (Math.abs(value) > 0.2) {
//set the speedFactor per axis, with acceleration when holding above threshold, up to a max speed
speedFactor[i] > 1 ? (speedFactor[i] = 1) : (speedFactor[i] *= 1.001);
console.log(value, speedFactor[i], i);
if (i == 2) {
//left and right axis on thumbsticks
if (data.handedness == "left") {
// (data.axes[2] > 0) ? console.log('left on left thumbstick') : console.log('right on left thumbstick')
//move our dolly
//we rotate the heading vector 90 degrees so we can do strafing side-to-side movement
dolly.position.x -= cameraVector.z * speedFactor[i] * data.axes[2];
dolly.position.z += cameraVector.x * speedFactor[i] * data.axes[2];
//provide haptic feedback if available in browser
if (
source.gamepad.hapticActuators &&
source.gamepad.hapticActuators[0]
) {
var pulseStrength = Math.abs(data.axes[2]) + Math.abs(data.axes[3]);
if (pulseStrength > 0.75) {
pulseStrength = 0.75;
}
var didPulse = source.gamepad.hapticActuators[0].pulse(
pulseStrength,
100
);
}
} else {
// (data.axes[2] > 0) ? console.log('left on right thumbstick') : console.log('right on right thumbstick')
dolly.rotateY(-THREE.Math.degToRad(data.axes[2]));
}
controls.update();
}
if (i == 3) {
//up and down axis on thumbsticks
if (data.handedness == "left") {
// (data.axes[3] > 0) ? console.log('up on left thumbstick') : console.log('down on left thumbstick')
dolly.position.y -= speedFactor[i] * data.axes[3];
//provide haptic feedback if available in browser
if (
source.gamepad.hapticActuators &&
source.gamepad.hapticActuators[0]
) {
var pulseStrength = Math.abs(data.axes[3]);
if (pulseStrength > 0.75) {
pulseStrength = 0.75;
}
var didPulse = source.gamepad.hapticActuators[0].pulse(
pulseStrength,
100
);
}
} else {
// (data.axes[3] > 0) ? console.log('up on right thumbstick') : console.log('down on right thumbstick')
dolly.position.x -= cameraVector.x * speedFactor[i] * data.axes[3];
dolly.position.z -= cameraVector.z * speedFactor[i] * data.axes[3];
//provide haptic feedback if available in browser
if (
source.gamepad.hapticActuators &&
source.gamepad.hapticActuators[0]
) {
var pulseStrength = Math.abs(data.axes[2]) + Math.abs(data.axes[3]);
if (pulseStrength > 0.75) {
pulseStrength = 0.75;
}
var didPulse = source.gamepad.hapticActuators[0].pulse(
pulseStrength,
100
);
}
}
controls.update();
}
} else {
//axis below threshold - reset the speedFactor if it is greater than zero or 0.025 but below our threshold
if (Math.abs(value) > 0.025) {
speedFactor[i] = 0.025;
}
}
});
}
//store this frame's data to compare with in the next frame
prevGamePads.set(source, data);
}
}
}
}
function isIterable(obj) { //function to check if object is iterable
// checks for null and undefined
if (obj == null) {
return false;
}
return typeof obj[Symbol.iterator] === "function";
}
////
/////////////////////////////////////
I have tried a lot of approaches to this before asking, and now I really have no clue how to accomplish GPU object picking on a loaded glTF file, so I'm hoping for any help I can get.
I've loaded a huge glTF file with a lot of small objects in it. Due to the object count it's not possible to get a good frame rate if I just add them all to the scene, so I've managed to reach 60fps by merging the glTF's children into chunks. But when I try to implement the webgl_interactive_cubes_gpu example, it doesn't seem to work for me: I always get the same object when I click.
To debug, I have tried rendering the pickingScene and everything seems to be in place graphically, but the picking itself doesn't behave as I expect, unless I'm doing something wrong.
Raycast picking is not a suitable option for me, as there are a lot of objects (55k) and raycasting against them all would kill the FPS.
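(For reference, the picking scheme in that example encodes each object's index as a flat vertex color on a copy of its geometry, then decodes the index from the single pixel read back from the picking render target; a minimal sketch of the idea, where pickGeometry stands for the per-object geometry copy and applyVertexColors is the helper from the three.js example:)
// encode: every vertex of object number childCounter gets that id as its color
applyVertexColors(pickGeometry, new THREE.Color().setHex(childCounter));
// decode: rebuild the id from the RGB bytes read out of the picking texture
var id = (pixelBuffer[0] << 16) | (pixelBuffer[1] << 8) | pixelBuffer[2];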
Below is the code that runs once the glTF is loaded:
var child = gltf.scene.children[i];
var childGeomCopy = child.geometry.clone();
childGeomCopy.translate(geomPosition.x, geomPosition.y, geomPosition.z);
childGeomCopy.scale(child.scale.x * Scalar, child.scale.y * Scalar, child.scale.z * Scalar);
childGeomCopy.computeBoundingBox();
childGeomCopy.computeBoundingSphere();
childGeomCopy.applyMatrix(new THREE.Matrix4());
geometriesPicking.push(childGeomCopy);
var individualObj = new THREE.Mesh(childGeomCopy, IndividualObjMat);
individualObj.name = "individual_" + child.name;
pickingData[childCounter] = {
object: individualObj,
position: individualObj.position.clone(),
rotation: individualObj.rotation.clone(),
scale: individualObj.scale.clone()
};
childCounter++;
Edit:
gltf.scene.traverse(function (child) {
//console.log(child.type);
if (child.isMesh) {
let geometry = child.geometry.clone();
let position = new THREE.Vector3();
position.x = child.position.x;
position.y = child.position.y;
position.z = child.position.z;
let rotation = new THREE.Euler();
rotation.x = child.rotation.x;
rotation.y = child.rotation.y;
rotation.z = child.rotation.z;
let scale = new THREE.Vector3();
scale.x = child.scale.x;
scale.y = child.scale.y;
scale.z = child.scale.z;
quaternion.setFromEuler(rotation);
matrix.compose(position.multiplyScalar(Scalar), quaternion, scale.multiplyScalar(Scalar));
geometry.applyMatrix(matrix);
applyVertexColors(geometry, color.setHex(Math.random() * 0xffffff));
geometriesDrawn.push(geometry);
geometry = geometry.clone();
applyVertexColors(geometry, color.setHex(childCounter));
geometriesPicking.push(geometry);
pickingData[childCounter] = {
object: new THREE.Mesh(geometry.clone(), new THREE.MeshBasicMaterial({ color: 0xffff00, blending: THREE.AdditiveBlending, transparent: true, opacity: 0.8 })),
id: childCounter,
position: position,
rotation: rotation,
scale: scale
};
childCounter++;
//console.log("%c [childCounter] :", "", childCounter);
}
});
...
var pickingGeom = THREE.BufferGeometryUtils.mergeBufferGeometries(geometriesPicking);
pickingGeom.rotateX(THREE.Math.degToRad(90)); pickingScene.add(new THREE.Mesh(pickingGeom, pickingMaterial));
Then in my MouseUp function I call pick(mouse) and pass in the mouse information:
function pick(mouse) {
camera.setViewOffset(renderer.domElement.width, renderer.domElement.height, mouse.x * window.devicePixelRatio | 0, mouse.y * window.devicePixelRatio | 0, 1, 1);
renderer.setRenderTarget(pickingTexture);
renderer.render(pickingScene, camera);
camera.clearViewOffset();
var pixelBuffer = new Uint8Array(4);
renderer.readRenderTargetPixels(pickingTexture, 0, 0, 1, 1, pixelBuffer);
var id = (pixelBuffer[0] << 16) | (pixelBuffer[1] << 8) | (pixelBuffer[2]);
var data = pickingData[id];
if (data) {
console.log(data.object.name, ":", data.position); // Always return the same object
}}
Based on a previous question I had recently posted:
How to create lines between nearby particles in ThreeJS?
I was able to create individual lines joining nearby particles. However, each line is being drawn twice because of the logic of the particle system, which follows how the original 2D particle system worked:
https://awingit.github.io/particles/
That version also draws the lines twice: each line is drawn once for each of the two particles in a connected pair.
I don't think this is ideal for performance. How would I draw a line only once for each pair of joined points?
P.S. Here is exactly the effect I would like to achieve, but cannot make sense of the code:
http://freelance-html-developer.com/clock/
I would like to understand the fundamental logic.
UPDATE:
I have created a jsfiddle with my progress.
var canvas, canvasDom, ctx, scene, renderer, camera, controls, geocoder, deviceOrientation = false;
var width = 800,
height = 600;
var particleCount = 20;
var pMaterial = new THREE.PointsMaterial({
color: 0x000000,
size: 0.5,
blending: THREE.AdditiveBlending,
//depthTest: false,
//transparent: true
});
var particles = new THREE.Geometry;
var particleSystem;
var line;
var lines = {};
var lineGroup = new THREE.Group();
var lineMaterial = new THREE.LineBasicMaterial({
color: 0x000000,
linewidth: 1
});
var clock = new THREE.Clock();
var maxDistance = 15;
function init() {
canvasDom = document.getElementById('canvas');
setupStage();
setupRenderer();
setupCamera();
setupControls();
setupLights();
clock.start();
window.addEventListener('resize', onWindowResized, false);
onWindowResized(null);
createParticles();
scene.add(lineGroup);
animate();
}
function setupStage() {
scene = new THREE.Scene();
}
function setupRenderer() {
renderer = new THREE.WebGLRenderer({
canvas: canvasDom,
logarithmicDepthBuffer: true
});
renderer.setSize(width, height);
renderer.setClearColor(0xfff6e6);
}
function setupCamera() {
camera = new THREE.PerspectiveCamera(70, width / height, 1, 10000);
camera.position.set(0, 0, -60);
}
function setupControls() {
if (deviceOrientation) {
controls = new THREE.DeviceOrientationControls(camera);
controls.connect();
} else {
controls = new THREE.OrbitControls(camera, renderer.domElement);
controls.target = new THREE.Vector3(0, 0, 0);
}
}
function setupLights() {
var light1 = new THREE.AmbientLight(0xffffff, 0.5); // soft white light
var light2 = new THREE.PointLight(0xffffff, 1, 0);
light2.position.set(100, 200, 100);
scene.add(light1);
scene.add(light2);
}
function animate() {
requestAnimationFrame(animate);
controls.update();
animateParticles();
updateLines();
render();
}
function render() {
renderer.render(scene, camera);
}
function onWindowResized(event) {
width = window.innerWidth;
height = window.innerHeight;
camera.aspect = width / height;
camera.updateProjectionMatrix();
renderer.setSize(width, height);
}
function createParticles() {
for (var i = 0; i < particleCount; i++) {
var pX = Math.random() * 50 - 25,
pY = Math.random() * 50 - 25,
pZ = Math.random() * 50 - 25,
particle = new THREE.Vector3(pX, pY, pZ);
particle.diff = Math.random() + 0.2;
particle.default = new THREE.Vector3(pX, pY, pZ);
particle.offset = new THREE.Vector3(0, 0, 0);
particle.velocity = {};
particle.velocity.y = particle.diff * 0.5;
particle.nodes = [];
particles.vertices.push(particle);
}
particleSystem = new THREE.Points(particles, pMaterial);
particleSystem.position.y = 0;
scene.add(particleSystem);
}
function animateParticles() {
var pCount = particleCount;
while (pCount--) {
var particle = particles.vertices[pCount];
var move = Math.sin(clock.getElapsedTime() * (1 * particle.diff)) / 4;
particle.offset.y += move * particle.velocity.y;
particle.y = particle.default.y + particle.offset.y;
detectCloseByPoints(particle);
}
particles.verticesNeedUpdate = true;
particleSystem.rotation.y += 0.01;
lineGroup.rotation.y += 0.01;
}
function updateLines() {
for (var _lineKey in lines) {
if (!lines.hasOwnProperty(_lineKey)) {
continue;
}
lines[_lineKey].geometry.verticesNeedUpdate = true;
}
}
function detectCloseByPoints(p) {
var _pCount = particleCount;
while (_pCount--) {
var _particle = particles.vertices[_pCount];
if (p !== _particle) {
var _distance = p.distanceTo(_particle);
var _connection = checkConnection(p, _particle);
if (_distance < maxDistance) {
if (!_connection) {
createLine(p, _particle);
}
} else if (_connection) {
removeLine(_connection);
}
}
}
}
function checkConnection(p1, p2) {
var _childNode, _parentNode;
_childNode = p1.nodes[particles.vertices.indexOf(p2)] || p2.nodes[particles.vertices.indexOf(p1)];
if (_childNode && _childNode !== undefined) {
_parentNode = (_childNode == p1) ? p2 : p1;
}
if (_parentNode && _parentNode !== undefined) {
return {
parent: _parentNode,
child: _childNode,
lineId: particles.vertices.indexOf(_parentNode) + '-' + particles.vertices.indexOf(_childNode)
};
} else {
return false;
}
}
function removeLine(_connection) {
// Could animate line out
var childIndex = particles.vertices.indexOf(_connection.child);
_connection.parent.nodes.splice(childIndex, 1);
deleteLine(_connection.lineId);
}
function deleteLine(_id) {
lineGroup.remove(lines[_id]);
delete lines[_id];
}
function addLine(points) {
var points = points || [new THREE.Vector3(Math.random() * 10, Math.random() * 10, Math.random() * 10), new THREE.Vector3(0, 0, 0)];
var _lineId = particles.vertices.indexOf(points[0]) + '-' + particles.vertices.indexOf(points[1]);
var lineGeom = new THREE.Geometry();
if (!lines[_lineId]) {
lineGeom.dynamic = true;
lineGeom.vertices.push(points[0]);
lineGeom.vertices.push(points[1]);
var curLine = new THREE.Line(lineGeom, lineMaterial);
curLine.touched = false;
lines[_lineId] = curLine;
lineGroup.add(curLine);
return curLine;
} else {
return false;
}
}
function createLine(p1, p2) {
p1.nodes[particles.vertices.indexOf(p2)] = p2;
addLine([p1, p2]);
}
$(document).ready(function() {
init();
});
I am really close, but I am not sure if it's optimized. There are flickering lines, and sometimes a line just stays stuck in place.
So here are my thoughts. It clicked for me that all I have to do is make the lines' Vector3 points equal to the relevant particles' Vector3 points; then I just need to set each line's geometry.verticesNeedUpdate = true;
Also, the way I manage the lines is to create a unique ID using the indexes of the two points, e.g. lines['8-2'] = line.
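(A small aside: if the two indexes are always put in the same order when that key is built, each pair maps to exactly one entry in lines; a minimal sketch of such a helper:)
function lineKey(i, j) {
  // order the indexes so '8-2' and '2-8' resolve to the same line
  return Math.min(i, j) + '-' + Math.max(i, j);
}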
The problem you're actually trying to solve is that while looping through your list of points, you're doubling the number of successful matches.
Example:
Consider a list of points, [A, B, C, D]. Your looping tests each point against all other points. For this example, A and C are the only points close enough to be considered nearby.
During the first iteration, A vs. all, you find that A and C are nearby, so you add a line. But when you're doing your iteration for C, you also find that A is nearby. This causes the second line, which you want to avoid.
Fixing it:
The solution is simple: Don't re-visit nodes you already checked. This works because the answer of distance from A to C is no different from distance from C to A.
The best way to do this is to adjust the indexing of your check loop:
// (Note: This is example code, and won't "just work.")
for (var check = 0, checkLength = nodes.length; check < checkLength; ++check) {
  for (var against = check + 1; against < checkLength; ++against) {
    if (nodes[check].distanceTo(nodes[against]) < delta) {
      buildThatLine(nodes[check], nodes[against]);
    }
  }
}
In the inner loop, the indexing is set to:
Skip the current node
Skip all nodes before the current node.
This is done by initializing the inner indexing to the outer index + 1.
Caveat:
This particular logic assumes that you discard all your lines for every frame. It's not the most efficient way to achieve the effect, but I'll leave making it more efficient as an exercise for you.
I am using three.js to draw an image to a canvas, collect data from the image (i.e. the pixel colors), and redraw it as a collection of particles using that collected data.
I have zero error messages or warnings, just a blank, black canvas.
The code I am using is below:
var xhr = new XMLHttpRequest();
xhr.open("GET", "http://example.com/assets/css/sl.jpg");
xhr.responseType = "blob";
xhr.onload = function()
{
blob = xhr.response;
P.readAsDataURL(blob);
P.onload = function(){
image = document.createElement("img");
image.src = P.result;
setTimeout(function(){
// split the image
addParticles();
}, 100);
}
}
xhr.send();
addLights();
update();
setTimeout(function(){
holdAtOrigin = "next";
},1000)
function addParticles()
{
// draw in the image, and make sure it fits the canvas size :)
var ratio = 1 / Math.max(image.width/500, image.height/500);
var scaledWidth = image.width * ratio;
var scaledHeight = image.height * ratio;
context.drawImage(image,
0,0,image.width,image.height,
(500 - scaledWidth) * .5, (500 - scaledHeight) *.5, scaledWidth, scaledHeight);
// now set up the particle material
var material = new THREE.MeshPhongMaterial( { } );
var geometry = new THREE.Geometry();
var pixels = context.getImageData(0,0,WIDTH,HEIGHT);
var step = DENSITY * 4;
var x = 0, y = 0;
// go through the image pixels
for(x = 0; x < WIDTH * 4; x+= step)
{
for(y = HEIGHT; y >= 0 ; y -= DENSITY)
{
var p = ((y * WIDTH * 4) + x);
// grab the actual data from the
// pixel, ignoring any transparent ones
if(pixels.data[p+3] > 0)
{
var pixelCol = (pixels.data[p] << 16) + (pixels.data[p+1] << 8) + pixels.data[p+2];
var color = new THREE.Color(pixelCol);
var vector = new THREE.Vector3(-300 + x/4, 240 - y, 0);
// push on the particle
geometry.vertices.push(new THREE.Vector3(vector));
geometry.colors.push(color);
}
}
}
// now create a new system
particleSystem = new THREE.Points(geometry, material);
console.log(particleSystem);
particleSystem.sortParticles = true;
// grab a couple of cacheable vals
particles = particleSystem.geometry.vertices;
colors = particleSystem.geometry.colors;
// add some additional vars to the
// particles to ensure we can do physics
// and so on
var ps = particles.length;
while(ps--)
{
var particle = particles[ps];
particle.velocity = new THREE.Vector3();
particle.mass = 5;
particle.origPos = particle.x.clone();
}
// gc and add
pixels = null;
scene.add(particleSystem);
//test render
}
function addLights()
{
// point
pointLight = new THREE.PointLight( 0xFFFFFF );
pointLight.position.x = 300;
pointLight.position.y = 300;
pointLight.position.z = 600;
scene.add( pointLight );
// directional
directionalLight = new THREE.DirectionalLight( 0xFFFFFF );
directionalLight.position.x = -.5;
directionalLight.position.y = -1;
directionalLight.position.z = -.5;
directionalLight.position.normalize();
directionalLight.intensity = .6;
scene.add( directionalLight );
}
function update(){
var ps = particles.length;
while(ps--)
{
var particle = particles[ps];
// if we are holding at the origin
// values, tween the particles back
// to where they should be
if(holdAtOrigin == "start")
{
particle.velocity = new THREE.Vector3();
//particle.position.x += (particle.origPos.x - particle.position.x) * .2;
//particle.position.y += (particle.origPos.y - particle.position.y) * .2;
//particle.position.z += (particle.origPos.z - particle.position.z) * .2;
particle.x.x += (particle.origPos.x - .0000000000000000001) * 2;
particle.x.y += (particle.origPos.y - .0000000000000000001) * 2;
}
else if (holdAtOrigin == "next")
{
particle.velocity = new THREE.Vector3();
particle.x.x += (particle.origPos.x - particle.x.x) * .2;
particle.x.y += (particle.origPos.y - particle.x.y) * .2;
particle.x.z += (particle.origPos.z - particle.x.z) * .2;
}
else{
// get the particles colour and put
// it into an array
var col = colors[ps];
var colArray = [col.r, col.g, col.b];
// go through each component colour
for(var i = 0; i < colArray.length; i++)
{
// only analyse it if actually
// has some of this colour
if(colArray[i] > 0)
{
// get the target based on where it
// is in the array
var target = i == 0 ? redCentre :
i == 1 ? greenCentre :
blueCentre;
// get the distance of the particle to the centre in question
// and add on the resultant acceleration
var dist = particle.position.distanceToSquared(target.position),
force = ((particle.mass * target.mass) / dist) * colArray[i] * AGGRESSION,
acceleration = (new THREE.Vector3())
.sub(target.position,particle.position)
.normalize()
.multiplyScalar(force);
// if we are attracting we add
// the velocity
if(mode == ATTRACT)
{
// note we only need to check the
// squared radius for the collision :)
if(dist > target.boundRadiusSquared) {
particle.velocity.addSelf(acceleration);
}
else if (bounceParticles) {
// bounce, bounce, bounce
particle.velocity.negate();
}
else {
// stop dead
particle.velocity = new THREE.Vector3();
}
}
else {
// push it away
particle.velocity.subSelf(acceleration);
}
particle.position.addSelf(particle.velocity);
}
}
}
}
// set up a request for a render
requestAnimationFrame(update);
render();
}
function render()
{
// only render if we have
// an active image
if(image) {
if(holdAtOrigin=="start")
{
camera.position.z = 900;
}
if(camera.position.z < 200)
{
//do nothing
}
else{
camera.position.z -= 1.7;
};
renderer.render( scene, camera );
}
}
I checked the console log at various intervals and found that the pixel data is being collected appropriately, so I don't know what is wrong.
Is it the material? When I used a normal (light-independent) material the code worked as expected, I could see my particles.
But I wanted it to be affected by lights, so I changed it to var material = new THREE.MeshPhongMaterial( { } ); without any arguments.
Is this my problem or is it elsewhere in the code?
Thank you!
This may also be pertinent: How to get the absolute position of a vertex in three.js?
Because particle.x.x or particle.x.y doesn't look right to me, even though I wrote that code based on logged object contents.
EDIT: I changed the Phong line to THREE.PointsMaterial and amped up the potency of the light, but still a blank, black canvas.
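(For reference, THREE.Points is normally paired with THREE.PointsMaterial, which ignores lights, and the per-pixel colors collected above only show up if the material is told to use the geometry's vertex colors. A minimal sketch for the legacy THREE.Geometry used here; note that newer three.js versions take a boolean vertexColors: true instead:)
var material = new THREE.PointsMaterial({
  size: 4, // assumed size, tune as needed
  vertexColors: THREE.VertexColors // use geometry.colors for each particle
});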
EDIT 2: So I think it may be a problem with the particle coordinates being misconstrued? When I inspect using console.log(particleSystem); I get the following:
Did it used to be that x, y and z were wrapped in a position property that newer versions of three.js have removed?
For example I've found example code like:
particle.origPos = particle.position.clone();
But I don't see a position property. How would I clone just the x, y and z bits, or should I clone the whole vertex? Sorry if this is confusing or irrelevant; I'm just trying to chase down why I have a blank canvas.
EDIT 3: I've removed the update function's position alterations, but I still get a weird console log for the particle system, even when all I am doing is cloning the particle with particle.origPos = particle.clone();
Basically I have x, y and z properties, but the x property is itself an object with its own x, y and z. Why is this, and how do I fix it?
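(For reference, with the legacy THREE.Geometry each entry of geometry.vertices is itself a THREE.Vector3; there is no .position wrapper, the vertex is the position. A minimal sketch of how that usually looks, assuming the vector built in addParticles():)
// push the Vector3 itself (wrapping it in another Vector3 would leave .x holding an object)
geometry.vertices.push(vector);
// keep a copy of the original position by cloning the whole vertex
particle.origPos = particle.clone();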