three.js camera rotating by touch events and deviceorientation event - javascript

I'm trying to build a webapp that let users watch 360 panorama images with three.js, but some of the code isn't working as expected. The problem involves rotating the camera.
I'm able to rotate the camera by listening to either touch events or deviceorientation events but I can't manage to make both work simultaneously.
Currently, the camera rotates by touch gesture, but it snaps back immediately after detecting a device orientation event.
I want the camera to rotate with touch gestures and then rotate with device orientation starting from the rotation it was previously set by touch.
I think I should improve deviceorientation event handler or setQuaternion method below in add-gestures.js but I don't know how.
index.html
<!doctype html>
<html>
<head>
<meta name="viewport" content="width=device-width,initial-scale=1">
<!-- FIX: the href contained leftover PHP template residue
     ("pages/vr/style.css') ?>") which produced a broken stylesheet link. -->
<link rel="stylesheet" href="pages/vr/style.css">
</head>
<body>
<canvas id="vr-canvas"></canvas>
<!-- Load order matters: init-vr.js publishes window.VRApp.{renderer,scene,camera},
     which add-gestures.js and add-sphere.js consume. -->
<script src="/js/lib/threejs/r104/build/three.min.js"></script>
<script src="/js/pages/vr/init-vr.js"></script>
<script src="/js/pages/vr/add-gestures.js"></script>
<script src="/js/pages/vr/add-sphere.js"></script>
</body>
</html>
init-vr.js
// init-vr.js — bootstraps the shared VR viewer: a WebGL renderer bound to
// #vr-canvas, an empty scene, and a wide-angle camera, then starts the
// render loop. The three objects are published on window.VRApp for the
// companion scripts (add-gestures.js, add-sphere.js).
window.VRApp = window.VRApp || {};

const canvas = document.querySelector("#vr-canvas");

const renderer = new THREE.WebGLRenderer({ canvas });
renderer.setPixelRatio(window.devicePixelRatio);

const scene = new THREE.Scene();

// 100° FOV suits a 360° panorama; near/far enclose the radius-100
// projection sphere created by add-sphere.js.
const camera = new THREE.PerspectiveCamera(
  100,
  canvas.width / canvas.height,
  0.01,
  100
);
// ZYX order so the yaw (Y) / pitch (X) writes done by the gesture
// handlers compose predictably.
camera.rotation.order = "ZYX";

// Render continuously on the browser's frame clock.
const animate = () => {
  requestAnimationFrame(animate);
  renderer.render(scene, camera);
};
animate();

window.VRApp.renderer = renderer;
window.VRApp.scene = scene;
window.VRApp.camera = camera;
add-gestures.js
// add-gestures.js — module-level state shared by the handlers below.
window.VRApp = window.VRApp || {};

// Finite interaction states for the pointer state machine.
const State = {
  Neutral: 0x0000,
  RotateCamera: 0x0001,
};
// Current interaction state.
let state = State.Neutral;
// Screen orientation angle in degrees (0 / 90 / -90 / 180); kept in sync
// by the "orientationchange" listener below.
let windowOrientation = window.orientation || 0;
// Snapshot of the camera rotation taken when a drag starts/ends; drags
// apply deltas relative to this snapshot.
let cameraRotationCache = window.VRApp.camera.rotation.clone();
// Pointer-down position plus precomputed vertical clamp bounds
// (see onMouseDownHandler / onMouseMoveHandler).
let mousePositionCache = {
  x: 0,
  y: 0,
  minYDiff: 0,
  maxYDiff: 0,
};
// Transition to a named state; unknown state names are ignored.
const setState = (stateName) => {
  if (Object.prototype.hasOwnProperty.call(State, stateName)) {
    state = State[stateName];
  }
};

// True when the current state matches the named state; unknown names
// never match.
const checkState = (stateName) => {
  return (
    Object.prototype.hasOwnProperty.call(State, stateName) &&
    state === State[stateName]
  );
};
// Maps a deviceorientation sample (alpha/beta/gamma, already in radians)
// plus the screen orientation angle (radians) onto the camera's world
// quaternion. Same transform as three.js DeviceOrientationControls:
//   - device Euler angles are applied in YXZ order,
//   - q1 is a -90° rotation about X (so the camera looks out of the
//     back of the device),
//   - q0 compensates for the screen rotation about the view (Z) axis.
// NOTE(review): the result is written as an ABSOLUTE pose each call, so
// any rotation previously applied by touch gestures is discarded — this
// is the reported "snap back". To continue from the touch-set heading,
// fold a yaw offset (captured from the camera when a drag ends) into
// the quaternion before assigning it.
const setQuaternion = (() => {
  const zee = new THREE.Vector3(0, 0, 1);
  const euler = new THREE.Euler();
  const q0 = new THREE.Quaternion();
  // Constant -90°-about-X quaternion (components: sin(-45°), 0, 0, cos(45°)).
  const q1 = new THREE.Quaternion(-1 * Math.sqrt(0.5), 0, 0, Math.sqrt(0.5));
  return (alpha, beta, gamma, orientation) => {
    euler.set(beta, alpha, -1 * gamma, "YXZ");
    window.VRApp.camera.quaternion.setFromEuler(euler);
    window.VRApp.camera.quaternion.multiply(q1);
    window.VRApp.camera.quaternion.multiply(q0.setFromAxisAngle(zee, -1 * orientation));
  };
})();
// Begin a camera drag: snapshot the camera rotation and the pointer
// position, and precompute how far (1 px is treated as 1°) the pointer
// may travel vertically before the pitch would pass ±90°.
// NOTE(review): the `clientY * (Math.PI / 180)` terms mix pixel and
// radian units — the bounds were presumably meant to be just
// `±90 - pitchInDegrees`; confirm against the clamping in
// onMouseMoveHandler before relying on exact behaviour at the poles.
const onMouseDownHandler = (clientX, clientY) => {
  setState("RotateCamera");
  cameraRotationCache = window.VRApp.camera.rotation.clone();
  mousePositionCache.x = clientX;
  mousePositionCache.y = clientY;
  mousePositionCache.minYDiff = -90 - (cameraRotationCache.x * (180 / Math.PI)) - (clientY * (Math.PI / 180));
  mousePositionCache.maxYDiff = 90 - (cameraRotationCache.x * (180 / Math.PI)) - (clientY * (Math.PI / 180));
};
// Drag handler: converts pointer deltas (1 px == 1°) into camera
// pitch/yaw relative to the rotation cached on pointer-down. Vertical
// movement is clamped with the precomputed bounds so the pitch cannot
// flip past the poles.
const onMouseMoveHandler = (clientX, clientY) => {
  if (!checkState("RotateCamera")) {
    return;
  }
  window.VRApp.camera.rotation.order = "ZYX";
  const xDiff = clientX - mousePositionCache.x;
  let yDiff = clientY - mousePositionCache.y;
  // Clamp the vertical delta and re-anchor the cached Y so movement past
  // the bound does not keep accumulating.
  if (yDiff < mousePositionCache.minYDiff) {
    yDiff = mousePositionCache.minYDiff;
    mousePositionCache.y = clientY - mousePositionCache.minYDiff;
  } else if (yDiff > mousePositionCache.maxYDiff) {
    yDiff = mousePositionCache.maxYDiff;
    mousePositionCache.y = clientY - mousePositionCache.maxYDiff;
  }
  const degToRad = Math.PI / 180;
  window.VRApp.camera.rotation.x = cameraRotationCache.x + yDiff * degToRad;
  window.VRApp.camera.rotation.y = cameraRotationCache.y + xDiff * degToRad;
};
// Pointer released / cancelled: return to Neutral, remember the rotation
// reached by the drag, and reset the drag-tracking cache.
const onMouseUpHandler = () => {
  setState("Neutral");
  cameraRotationCache = window.VRApp.camera.rotation.clone();
  Object.assign(mousePositionCache, {
    x: 0,
    y: 0,
    minYDiff: 0,
    maxYDiff: 0,
  });
};
if ("onresize" in window) {
window.addEventListener("resize", (event) => {
const width = window.innerWidth;
const height = window.innerHeight;
window.VRApp.renderer.domElement.width = width;
window.VRApp.renderer.domElement.height = height;
window.VRApp.renderer.domElement.style.height = height + "px";
window.VRApp.renderer.setSize(width, height);
window.VRApp.camera.aspect = width / height;
window.VRApp.camera.updateProjectionMatrix();
});
}
if ("onload" in window) {
window.addEventListener("load", (event) => {
const width = window.innerWidth;
const height = window.innerHeight;
window.VRApp.renderer.domElement.width = width;
window.VRApp.renderer.domElement.height = height;
window.VRApp.renderer.domElement.style.height = height + "px";
window.VRApp.renderer.setSize(width, height);
window.VRApp.camera.aspect = width / height;
window.VRApp.camera.updateProjectionMatrix();
});
}
if ("onmousedown" in window.VRApp.renderer.domElement) {
window.VRApp.renderer.domElement.addEventListener("mousedown", (event) => {
onMouseDownHandler(event.clientX, event.clientY);
});
}
if ("onmousemove" in window.VRApp.renderer.domElement) {
window.VRApp.renderer.domElement.addEventListener("mousemove", (event) => {
onMouseMoveHandler(event.clientX, event.clientY);
});
}
if ("onmouseup" in window.VRApp.renderer.domElement) {
window.VRApp.renderer.domElement.addEventListener("mouseup", (event) => {
onMouseUpHandler();
});
}
if ("onmouseleave" in window.VRApp.renderer.domElement) {
window.VRApp.renderer.domElement.addEventListener("mouseleave", (event) => {
onMouseUpHandler();
});
}
if ("ontouchstart" in window.VRApp.renderer.domElement) {
window.VRApp.renderer.domElement.addEventListener("touchstart", (event) => {
event.preventDefault();
if (event.touches.length === 1) {
const touch = event.touches[0];
onMouseDownHandler(touch.clientX, touch.clientY);
}
});
}
if ("ontouchmove" in window.VRApp.renderer.domElement) {
window.VRApp.renderer.domElement.addEventListener("touchmove", (event) => {
event.preventDefault();
if (event.touches.length === 1) {
const touch = event.touches[0];
onMouseMoveHandler(touch.clientX, touch.clientY);
}
});
}
if ("ontouchend" in window.VRApp.renderer.domElement) {
window.VRApp.renderer.domElement.addEventListener("touchend", (event) => {
event.preventDefault();
onMouseUpHandler();
});
}
if ("ontouchcancel" in window.VRApp.renderer.domElement) {
window.VRApp.renderer.domElement.addEventListener("touchcancel", (event) => {
event.preventDefault();
onMouseUpHandler();
});
}
if ("onorientationchange" in window) {
window.addEventListener("orientationchange", (event) => {
windowOrientation = window.orientation || 0;
});
}
if ("ondeviceorientation" in window) {
window.addEventListener("deviceorientation", (event) => {
if (checkState("Neutral")) {
let alpha = event.alpha * (Math.PI / 180);
let beta = event.beta * (Math.PI / 180);
let gamma = event.gamma * (Math.PI / 180);
let orientation = windowOrientation * (Math.PI / 180);
setQuaternion(alpha, beta, gamma, orientation);
}
});
}
add-sphere.js
// add-sphere.js — inside-out projection sphere for the 360° panorama.
window.VRApp = window.VRApp || {};

// The z-scale of -1 flips the faces so the texture is visible from the
// sphere's centre; the 90° yaw lines the image seam up behind the camera.
const sphere = (() => {
  const geometry = new THREE.SphereGeometry(100, 64, 64);
  geometry.scale(1, 1, -1);
  geometry.rotateY(Math.PI / 2);
  return new THREE.Mesh(geometry, new THREE.MeshBasicMaterial({}));
})();

// Equirectangular panorama applied as the sphere's color map.
const textureLoader = new THREE.TextureLoader();
const texture = textureLoader.load("/img/pages/vr/sample-360.jpg");
sphere.material.map = texture;

window.VRApp.scene.add(sphere);

Related

Problème MVC three.js

I have a threejs code that allows me to create a virtual tour, I want to implement an mvc method but for now I have some error that I can't solve.
Here is what I did
Scene.js
import * as THREE from 'three'
import { TweenLite } from '/gsap/src/all.js'
import { init } from '/client.js';
// Module-level scratch variable.
// NOTE(review): this is declared but NEVER assigned, so every use of
// `scene` inside addTooltip's onClick passes `undefined` to
// createScene/destroy — the source of the reported
// "Cannot read properties of undefined (reading 'add')" error.
let scene;

/**
 * One panorama "room" of the virtual tour: a textured sphere plus
 * clickable sprite hotspots (tooltips) linking to other Scene instances.
 */
class Scene {
  // image: path of the equirectangular panorama texture.
  // camera: shared THREE camera (stored, not used directly here).
  constructor(image, camera) {
    this.image = image
    this.points = []    // hotspot descriptors ({ position, name, scene, image })
    this.sprites = []   // THREE.Sprite instances built from points
    this.scene = null   // the THREE.Scene this room is mounted into
    this.camera = camera
  }
  // Create the scene (translated from: "Création de la scène")
  createScene(scene) {
    this.scene = scene
    const geometry = new THREE.SphereGeometry(11, 32, 32)
    const texture = new THREE.TextureLoader().load(this.image)
    texture.wrapS = THREE.RepeatWrapping
    // Mirror horizontally so the panorama reads correctly from inside.
    texture.repeat.x = -1
    texture.minFilter = THREE.LinearFilter;
    const material = new THREE.MeshBasicMaterial({
      map: texture,
      side: THREE.DoubleSide
    })
    material.transparent = true
    this.sphere = new THREE.Mesh(geometry, material)
    this.scene.add(this.sphere)
    this.points.forEach(this.addTooltip.bind(this))
  }
  // Register a hotspot descriptor; materialized when createScene runs.
  addPoint(point) {
    this.points.push(point)
  }
  // Create the tooltip sprite (translated from: "Création du tooltip")
  addTooltip = function (point) {
    var loader = new THREE.TextureLoader();
    let spriteMap = loader.load('../photo/' + point.image, (texture) => {
      let spriteMaterial = new THREE.SpriteMaterial({
        map: spriteMap
      })
      let sprite = new THREE.Sprite(spriteMaterial)
      sprite.name = point.name;
      // Place the sprite on a radius-10 shell in the point's direction.
      sprite.position.copy(point.position.clone().normalize().multiplyScalar(10));
      sprite.scale.multiplyScalar(2)
      this.scene.add(sprite);
      this.sprites.push(sprite);
      if (point.scene !== false) {
        sprite.onClick = () => {
          this.destroy();
          // NOTE(review): `scene` below is the unassigned module variable
          // at the top of this file, NOT this.scene — it is undefined
          // here; pass the real THREE.Scene instead.
          point.scene.destroy(scene);
          point.scene.createScene(scene);
          TweenLite.to(this.sphere.material, 1, {
            opacity: 0,
            onComplete: () => {
              this.scene.remove(this.sphere)
            }
          })
        }
      } else {
        sprite.onClick = () => { }
      }
    })
  }
  // Destroy the tooltips/scene on every scene change
  // (translated from: "Destruction des tooltip/scene ...").
  // NOTE(review): the same array is iterated in a nested forEach, so each
  // sprite is tweened/removed N times — almost certainly one loop too many.
  destroy() {
    this.sprites.forEach((sprite) => {
      this.sprites.forEach((sprite) => {
        TweenLite.to(sprite.scale, 1, {
          x: 0,
          y: 0,
          z: 0,
          onComplete: () => {
            this.scene.remove(sprite)
          }
        })
      })
    })
  }
  // Grow in the new room's hotspot sprites
  // (translated from: "Apparition des nouveaux point de scene").
  appear() {
    this.sprites.forEach((sprite) => {
      sprite.scale.set(0, 0, 0)
      TweenLite.to(sprite.scale, 1, {
        x: 1,
        y: 3,
        z: 3
      })
    })
  }
}
export default Scene;
Client.Js
import * as THREE from 'three'
import { OrbitControls } from './jsm/controls/OrbitControls.js'
import Scene from '/js/scene.js';
// Bootstrap the tour once the page (DOM and assets) has finished loading.
window.addEventListener('load', () => init());
/**
 * Bootstraps the virtual tour: renderer, camera, orbit controls, the
 * first two Scene rooms, resize handling, the render loop, and the
 * click/hover raycasting that drives hotspot navigation and the tooltip.
 */
export function init() {
  const tooltip = document.querySelector('#tooltip')
  let renderer
  let scene;
  const container = document.body
  scene = new THREE.Scene()
  const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 100)
  camera.position.z = 2
  renderer = new THREE.WebGLRenderer({
    canvas: document.querySelector('#world'),
  });
  renderer.setSize(window.innerWidth, window.innerHeight)
  // NOTE(review): the #world canvas is already in the document; this
  // appendChild (and the second one further down) re-append the same node.
  document.body.appendChild(renderer.domElement)
  const controls = new OrbitControls(camera, renderer.domElement)
  controls.maxDistance = 3;
  controls.minDistance = 0.9;
  // Negative speed inverts dragging, the usual feel inside a panorama.
  controls.rotateSpeed = -0.3
  controls.enableZoom = true
  controls.enablePan = false
  controls.autoRotate = true
  controls.autoRotateSpeed = 0.1
  controls.enableDamping = true;
  controls.dampingFactor = 0.3;
  camera.position.set(1, 0, 0)
  controls.update()
  // Two rooms; s links to s2 via the hotspot added below.
  let s = new Scene('photo/entre.jpg', camera)
  let s2 = new Scene('photo/entre.jpg', camera)
  s.addPoint({
    position: new THREE.Vector3(-10.468942480245712, -1.467960149500938, -2.8200827216367097),
    name: '',
    scene: s2,
    image: '/logo/hall.png'
  })
  s.createScene(scene)
  s.appear()
  container.appendChild(renderer.domElement)
  window.addEventListener(
    'resize',
    () => {
      camera.aspect = window.innerWidth / window.innerHeight
      camera.updateProjectionMatrix()
      renderer.setSize(window.innerWidth, window.innerHeight)
      render()
    },
    false
  )
  function animate() {
    requestAnimationFrame(animate)
    controls.update()
    render()
  }
  function render() {
    renderer.render(scene, camera)
  }
  animate()
  const rayCaster = new THREE.Raycaster()
  // Click: fire the hotspot's onClick and pan the camera toward it.
  function onClick(e) {
    let mouse = new THREE.Vector2(
      (e.clientX / window.innerWidth) * 2 - 1,
      -(e.clientY / window.innerHeight) * 2 + 1
    )
    rayCaster.setFromCamera(mouse, camera)
    let intersects = rayCaster.intersectObjects(scene.children)
    intersects.forEach(function (intersect) {
      if (intersect.object.type === 'Sprite') {
        intersect.object.onClick()
        // NOTE(review): `spriteActive` is never declared anywhere in this
        // file — reaching this branch throws a ReferenceError.
        if (spriteActive) {
          tooltip.classList.remove('is-active')
          spriteActive = false
        }
      }
    })
    intersects = rayCaster.intersectObject(s.sphere)
    if (intersects.length > 0) {
      console.log(intersects[0].point)
    }
    // NOTE(review): this second pass re-raycasts but only ever inspects
    // intersectes[0] while looping over `intersects` — presumably one
    // raycast and one loop were intended; verify before relying on it.
    let intersectes = rayCaster.intersectObjects(scene.children)
    intersects.forEach(function (intersect) {
      if (intersectes[0].object.type == "Sprite") {
        gsap.to(camera.position, {
          x: -intersectes[0].object.position.x,
          y: 0,
          z: -intersectes[0].object.position.z,
          duration: 1.5,
          ease: "power4.inOut",
        })
      }
    })
  }
  // Hover: position the tooltip over a named sprite and pause auto-rotate.
  function onMouseMove(e) {
    let mouse = new THREE.Vector2(
      (e.clientX / window.innerWidth) * 2 - 1,
      -(e.clientY / window.innerHeight) * 2 + 1
    )
    rayCaster.setFromCamera(mouse, camera)
    let foundSprite = false
    let intersects = rayCaster.intersectObjects(scene.children)
    intersects.forEach(function (intersect) {
      if (intersect.object.name != '') {
        // Project the sprite into NDC, then to CSS pixels.
        let p = intersect.object.position.clone().project(camera)
        tooltip.style.top = ((-1 * p.y + 1) * window.innerHeight / 2) + 'px'
        tooltip.style.left = ((p.x + 1) * window.innerWidth / 2) + 'px'
        tooltip.classList.add('is-active')
        //Texte dans le tooltip
        foundSprite = true
      }
      if (foundSprite) {
        container.classList.add('hover')
        controls.autoRotate = false
      } else {
        container.classList.remove('hover')
        controls.autoRotate = true;
      }
    })
  }
  container.addEventListener('click', onClick)
  container.addEventListener('mousemove', onMouseMove)
}
And here is the error I get
Uncaught TypeError: Cannot read properties of undefined (reading 'add')
at Scene.createScene (scene.js:30:20)
at sprite.onClick (scene.js:58:37)
at client.js:94:34
at Array.forEach (<anonymous>)
at HTMLBodyElement.onClick (client.js:91:20)
I've tried everything but no idea so far.
it seems to me that I can't get the scene variable which is in the init() or in the CreateScene method, I don't know
Thanks for your future help.
You should read through the error, it gives you lots of info of where the error is happening:
At Sprite.onClick, you're calling Scene.createScene, which calls .add() on an undefined variable.
So, looking through your code, now you can pinpoint where it's happening:
if (point.scene !== false) {
sprite.onClick = () => {
this.destroy();
point.scene.destroy(scene);
// 'scene' doesn't exist here, so it's undefined
point.scene.createScene(scene);

react/addEventListener mouseenter and mouseleave is firing twice every time

I am creating a bee that flies around a canvas. I give it a random destination to reach; once it arrives, a new destination is generated, so it keeps wandering. Now I am trying to make it start following my mouse instead of the destination point whenever the mouse gets close enough. I think I know how to do this, but the problem is that my listeners fire twice when using React, and only once in plain JS. Edit: It looks like everything is duplicating — I just tried drawing without clearing the canvas, and I get two bees and two destinations.
App
import Canvas from "./canvas/Canvas";
import "./App.css";
import { useRef, useEffect, useState } from "react";
function App() {
const appRef = useRef();
const [loaded, setLoaded] = useState(false);
useEffect(() => {
if (appRef) {
setLoaded(true);
}
return () => setLoaded(false);
}, []);
return (
<div className="App" ref={appRef}>
{loaded ? (
<Canvas
width={appRef.current.clientWidth}
height={appRef.current.clientHeight}
/>
) : (
<></>
)}
</div>
);
}
export default App;
Canvas
import Bee from "./bee";
import { useState, useCallback, useEffect } from "react";
import "./canvas.css";
// Canvas component (the buggy version under discussion).
// NOTE(review): everything inside `if (context)` runs in the RENDER body,
// so each re-render constructs another Bee (re-registering its listeners)
// and starts another requestAnimationFrame loop — exactly the "fires
// twice / two bees" symptom reported above. The fixed version further
// down moves this work into a useEffect.
const Canvas = ({ width, height }) => {
  const [canvas, setCanvas] = useState();
  const [context, setContext] = useState();
  // Callback ref: capture the <canvas> node once it mounts.
  const canvasCallback = useCallback((node) => {
    if (node) {
      setCanvas(node);
    }
  }, []);
  useEffect(() => {
    if (canvas) {
      setContext(canvas.getContext("2d"));
      canvas.width = width;
      canvas.height = height;
    }
  }, [canvas]);
  if (context) {
    // Game loop
    // let fps = 60;
    // let now;
    // let then = Date.now();
    // let delta;
    const bee = new Bee(context, canvas.width, canvas.height, canvas);
    const gameLoop = () => {
      // let interval = 1000 / fps;
      window.requestAnimationFrame(gameLoop);
      // now = Date.now();
      // delta = now - then;
      // if (delta > interval) {
      // then = now - (delta % interval);
      bee.move();
      // }
    };
    window.requestAnimationFrame(gameLoop);
    //Game loop end
  }
  return <canvas id="canvas" ref={canvasCallback} />;
};
export default Canvas;
Bee
import beeImg from "./bee.png";
import beeLeftImg from "./bee-left.png";
/**
 * A wandering "bee" drawn on a 2D canvas. Picks random destinations and
 * steers toward them; when the mouse comes within 200px it follows the
 * mouse instead. Registers mousemove on window and enter/leave logging on
 * the parent element — note these listeners are never removed, so every
 * Bee constructed adds another set (relevant to the duplication bug).
 */
function Bee(ctx, width, height, parent) {
  this.currentPos = { x: width / 5, y: height / 3 };
  this.beeWidth = 32;
  this.beeHeight = 32;
  this.velocity = { x: 0.1, y: 0 };
  this.acc = { x: 0, y: 0 };
  // Current wander target.
  this.direction = { x: Math.random() * width, y: Math.random() * height };
  this.mouse = { x: 0, y: 0 };
  this.mouseInside = false;
  // Sprite images: right- and left-facing, chosen by velocity sign.
  let loadedR = false;
  let loadedL = false;
  let img = new Image();
  img.src = beeImg;
  img.onload = () => {
    loadedR = true;
  };
  let imgLeft = new Image();
  imgLeft.src = beeLeftImg;
  imgLeft.onload = () => {
    loadedL = true;
  };
  // Clear the canvas and draw the facing-appropriate sprite at (x, y).
  // NOTE(review): clearRect takes 4 args; the extra `this.bee` is ignored.
  this.createBee = (x, y) => {
    if (this.velocity.x >= 0 && loadedR) {
      ctx.clearRect(0, 0, width, height, this.bee);
      ctx.drawImage(img, x, y, this.beeHeight, this.beeHeight);
    }
    if (this.velocity.x < 0 && loadedL) {
      ctx.clearRect(0, 0, width, height, this.bee);
      ctx.drawImage(imgLeft, x, y, this.beeHeight, this.beeHeight);
    }
  };
  window.addEventListener("mousemove", (e) => {
    this.mouse = { x: e.clientX, y: e.clientY };
  });
  parent.addEventListener("mouseenter", (e) => {
    console.log(e, "enter");
    e.stopPropagation()
  });
  parent.addEventListener("mouseleave", (e) => {
    console.log(e, "left");
    e.stopPropagation()
  });
  // Steer one step toward (x, y): accelerate along the normalized
  // direction, integrate velocity/position, clamp to the canvas bounds,
  // then draw the bee and a circle at the current wander target.
  let goTo = (x, y) => {
    let v = {
      x: this.currentPos.x - x,
      y: this.currentPos.y - y,
    };
    let mag = Math.sqrt(Math.pow(v.x, 2) + Math.pow(v.y, 2));
    let normalize = { x: v.x / mag, y: v.y / mag };
    this.acc.x -= normalize.x;
    this.acc.y -= normalize.y;
    this.velocity.x += this.acc.x / 50;
    this.velocity.y += this.acc.y / 50;
    this.currentPos.x += this.velocity.x;
    this.currentPos.y += this.velocity.y;
    this.acc.x = 0;
    this.acc.y = 0;
    if (this.currentPos.x >= width - this.beeWidth) {
      this.currentPos.x = width - this.beeWidth;
      this.velocity.x = 0;
    }
    if (this.currentPos.x <= 0) {
      this.currentPos.x = 0;
      this.velocity.x = 0;
    }
    if (this.currentPos.y >= height - this.beeHeight) {
      this.currentPos.y = height - this.beeHeight;
      this.velocity.y = 0;
    }
    if (this.currentPos.y <= 0) {
      this.currentPos.y = 0;
      this.velocity.y = 0;
    }
    this.createBee(this.currentPos.x, this.currentPos.y);
    ctx.beginPath();
    ctx.arc(this.direction.x, this.direction.y, 10, 0, 2 * Math.PI);
    ctx.stroke();
  };
  // Per-frame update: follow the mouse when within 200px, otherwise head
  // for the wander target, re-rolling it once within 100px.
  this.move = () => {
    let mouseV = {
      x: this.currentPos.x - this.mouse.x,
      y: this.currentPos.y - this.mouse.y,
    };
    let mouseMag = Math.sqrt(Math.pow(mouseV.x, 2) + Math.pow(mouseV.y, 2));
    if (mouseMag < 200) {
      goTo(this.mouse.x, this.mouse.y);
    } else {
      let dirV = {
        x: this.currentPos.x - this.direction.x,
        y: this.currentPos.y - this.direction.y,
      };
      let dirMag = Math.sqrt(Math.pow(dirV.x, 2) + Math.pow(dirV.y, 2));
      if (dirMag <= 100) {
        this.direction = {
          x: Math.random() * width,
          y: Math.random() * height,
        };
      }
      goTo(this.direction.x, this.direction.y);
    }
  };
}
export default Bee;
And this is in javascript single file, if anyone will want to test it, just add some image.src of your choice
// Plain-JS reference version of the bee demo (no React). The author
// reports listeners fire only ONCE here, because this top-level script
// runs a single time — unlike the React render body above.
if (canvas.getContext) {
  const ctx = canvas.getContext("2d");
  canvas.width = window.innerWidth;
  canvas.height = window.innerHeight;
  // Same wandering-bee model as the React version; see comments there.
  function Bee(ctx, width, height) {
    this.currentPos = { x: width / 5, y: height / 3 };
    this.beeWidth = 32;
    this.beeHeight = 32;
    this.velocity = { x: 0.1, y: 0 };
    this.acc = { x: 0, y: 0 };
    this.direction = { x: width / 2, y: height / 2 };
    this.mouse = { x: 0, y: 0 };
    // NOTE(review): new Image objects are created on EVERY draw here and
    // drawing happens in onload, so frames render only after each load
    // settles — the React version hoists the images out instead.
    this.createBee = (x, y) => {
      // ctx.clearRect(0, 0, width, height, this.bee);
      // ctx.beginPath();
      // ctx.arc(x, y, 10, 0, 2 * Math.PI);
      // ctx.stroke();
      const img = new Image();
      img.src = "./bee.png";
      const imgLeft = new Image();
      imgLeft.src = "./bee-left.png";
      if (this.velocity.x >= 0) {
        img.onload = () => {
          ctx.clearRect(0, 0, width, height, this.bee);
          ctx.drawImage(img, x, y, this.beeHeight, this.beeHeight);
        };
      }
      if (this.velocity.x < 0) {
        imgLeft.onload = () => {
          ctx.clearRect(0, 0, width, height, this.bee);
          ctx.drawImage(imgLeft, x, y, this.beeHeight, this.beeHeight);
        };
      }
    };
    window.addEventListener("mousemove", (e) => {
      this.mouse = { x: e.clientX, y: e.clientY };
    });
    canvas.addEventListener("mouseenter", (e) => {
      console.log(e, "enter");
      // e.stopPropagation()
    });
    canvas.addEventListener("mouseleave", (e) => {
      console.log(e, "left");
      // e.stopPropagation()
    });
    // Steer one step toward (x, y); clamp to bounds; draw bee + target.
    let goTo = (x, y) => {
      let v = {
        x: this.currentPos.x - x,
        y: this.currentPos.y - y,
      };
      let mag = Math.sqrt(Math.pow(v.x, 2) + Math.pow(v.y, 2));
      let normalize = { x: v.x / mag, y: v.y / mag };
      this.acc.x -= normalize.x;
      this.acc.y -= normalize.y;
      this.velocity.x += this.acc.x / 50;
      this.velocity.y += this.acc.y / 50;
      this.currentPos.x += this.velocity.x;
      this.currentPos.y += this.velocity.y;
      this.acc.x = 0;
      this.acc.y = 0;
      if (this.currentPos.x >= width - this.beeWidth) {
        this.currentPos.x = width - this.beeWidth;
        this.velocity.x = 0;
      }
      if (this.currentPos.x <= 0) {
        this.currentPos.x = 0;
        this.velocity.x = 0;
      }
      if (this.currentPos.y >= height - this.beeHeight) {
        this.currentPos.y = height - this.beeHeight;
        this.velocity.y = 0;
      }
      if (this.currentPos.y <= 0) {
        this.currentPos.y = 0;
        this.velocity.y = 0;
      }
      this.createBee(this.currentPos.x, this.currentPos.y);
      ctx.beginPath();
      ctx.arc(this.direction.x, this.direction.y, 10, 0, 2 * Math.PI);
      ctx.stroke();
    };
    // Per-frame update: follow mouse within 200px, else wander.
    this.move = () => {
      let mouseV = {
        x: this.currentPos.x - this.mouse.x,
        y: this.currentPos.y - this.mouse.y,
      };
      let mouseMag = Math.sqrt(Math.pow(mouseV.x, 2) + Math.pow(mouseV.y, 2));
      if (mouseMag < 200) {
        goTo(this.mouse.x, this.mouse.y);
      } else {
        let dirV = {
          x: this.currentPos.x - this.direction.x,
          y: this.currentPos.y - this.direction.y,
        };
        let dirMag = Math.sqrt(Math.pow(dirV.x, 2) + Math.pow(dirV.y, 2));
        if (dirMag <= 100) {
          this.direction = {
            x: Math.random() * width,
            y: Math.random() * height,
          };
        }
        goTo(this.direction.x, this.direction.y);
      }
    };
  }
  const bee = new Bee(ctx, 700, 700);
  // Game loop throttled to ~60fps on top of requestAnimationFrame.
  let fps = 60;
  let now;
  let then = Date.now();
  let delta;
  const gameLoop = () => {
    let interval = 1000 / fps;
    window.requestAnimationFrame(gameLoop);
    now = Date.now();
    delta = now - then;
    if (delta > interval) {
      then = now - (delta % interval);
      bee.move();
    }
  };
  window.requestAnimationFrame(gameLoop);
  //Game loop end
}
My first hypothesis when something like this happens: check whether you added the listeners twice or more. Also check how many times you are creating Bee (add a console.log at the beginning of the Bee constructor).
I think if event listener was called twice then it was somehow added twice.
I am really confused why was this a problem, but once I changed Canvas.js file to this
// The poster's working Canvas version: Bee creation and the game loop are
// moved out of the render body into runCanvas, invoked from a useEffect
// that fires once when the 2D context becomes available — so only ONE Bee
// (and one set of listeners, one RAF loop) is ever created.
const Canvas = ({ width, height }) => {
  const [canvas, setCanvas] = useState();
  const [context, setContext] = useState();
  const [bee, setBee] = useState();
  // Callback ref: capture the <canvas> node once it mounts.
  const canvasCallback = useCallback((node) => {
    if (node) {
      setCanvas(node);
    }
  }, []);
  // Build the Bee and start the throttled game loop (runs once; see the
  // [context] effect below).
  const runCanvas = () => {
    const bee = new Bee(context, canvas.width, canvas.height, canvas)
    setBee(bee);
    // Game loop
    let fps = 60;
    let now;
    let then = performance.now();
    let delta;
    const gameLoop = () => {
      let interval = 1000 / fps;
      now = performance.now();
      delta = now - then;
      if (delta > interval) {
        then = now - (delta % interval);
        // context.clearRect(0, 0, canvas.width, canvas.height);
        bee.move();
      }
      window.requestAnimationFrame(gameLoop);
    };
    window.requestAnimationFrame(gameLoop);
    //Game loop end
  }
  useEffect(() => {
    if (canvas) {
      setContext(canvas.getContext("2d"));
      canvas.width = width;
      canvas.height = height;
    }
  }, [canvas]);
  useEffect(() => {
    if(context){
      runCanvas()
    }
  }, [context])
  return <canvas id="canvas" ref={canvasCallback} />;
};
It started to work properly.
If someone could explain reason, I would give correct answer.

Audio API center visualisation on frequency

I am working on an audio visualizer for the web that also lets the user "tune" the raw audio signal visualizer to a frequency. This is a feature of many hardware oscilloscopes. Basically, when a user centers on 440Hz and I have a 440Hz sine wave, the wave should stay still on the canvas and not move left or right. My plan was to move the graph to the left according to the frequency (440Hz = 1/440s to the left per second because the wave should repeat every 1/440s), but this does not work as it seems.
I could not find the units used by the Audio Analyzer Node's time domain data. I guess that it's in milliseconds, but I am not certain.
"use strict";
// Oscillator instead of mic for debugging
const USE_OSCILLATOR = true;
// Compatibility
if (!window.AudioContext)
window.AudioContext = window.webkitAudioContext;
if (!navigator.getUserMedia)
navigator.getUserMedia =
navigator.mozGetUserMedia ||
navigator.webkitGetUserMedia ||
navigator.msGetUserMedia;
// Main
/**
 * Audio visualizer: each frame draws the FFT magnitude spectrum, marks
 * the dominant frequency with a green line, and draws the raw
 * time-domain waveform shifted left so the tuned frequency appears to
 * stand still (oscilloscope-style triggering).
 */
class App {
  constructor(visualizerElement, optionsElement) {
    this.visualizerElement = visualizerElement;
    this.optionsElement = optionsElement;
    // HTML elements
    this.canvas = document.createElement("canvas");
    // Context
    this.context = new AudioContext({
      // Low latency
      latencyHint: "interactive",
    });
    this.canvasCtx = this.canvas.getContext("2d", {
      // Low latency
      desynchronized: true,
      alpha: false,
    });
    // Audio nodes: analyser plus a byte buffer reused for both the
    // frequency-domain and time-domain reads.
    this.audioAnalyser = this.context.createAnalyser();
    this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
    this.audioInputStream = null;
    this.audioInputNode = null;
    if (this.canvasCtx === null)
      throw new Error("2D rendering Context not supported by browser.");
    this.updateCanvasSize();
    window.addEventListener("resize", () => this.updateCanvasSize());
    this.drawVisualizer();
    this.visualizerElement.appendChild(this.canvas);
    if (USE_OSCILLATOR) {
      // Debug source: a fixed 440 Hz sine instead of the microphone.
      let oscillator = this.context.createOscillator();
      oscillator.type = "sine";
      oscillator.frequency.setValueAtTime(440, this.context.currentTime);
      oscillator.connect(this.audioAnalyser);
      oscillator.start();
    }
    else {
      navigator.getUserMedia({ audio: true }, (stream) => {
        this.audioInputStream = stream;
        this.audioInputNode = this.context.createMediaStreamSource(stream);
        this.audioInputNode.channelCountMode = "explicit";
        this.audioInputNode.channelCount = 1;
        this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
        this.audioInputNode.connect(this.audioAnalyser);
      }, (err) => console.error(err));
    }
  }
  // Match the canvas to the window and flip the Y axis so larger sample
  // values are drawn upward, origin at vertical centre.
  updateCanvasSize() {
    var _a;
    this.canvas.width = window.innerWidth;
    this.canvas.height = window.innerHeight;
    (_a = this.canvasCtx) === null || _a === void 0 ? void 0 : _a.setTransform(1, 0, 0, -1, 0, this.canvas.height * 0.5);
  }
  drawVisualizer() {
    if (this.canvasCtx === null)
      return;
    const ctx = this.canvasCtx;
    ctx.fillStyle = "black";
    ctx.fillRect(0, -0.5 * this.canvas.height, this.canvas.width, this.canvas.height);
    // Draw FFT
    this.audioAnalyser.getByteFrequencyData(this.audioBuffer);
    const step = this.canvas.width / this.audioBuffer.length;
    const scale = this.canvas.height / (2 * 255);
    ctx.beginPath();
    ctx.moveTo(-step, this.audioBuffer[0] * scale);
    this.audioBuffer.forEach((sample, index) => {
      ctx.lineTo(index * step, scale * sample);
    });
    ctx.strokeStyle = "white";
    ctx.stroke();
    // Get the highest dominant frequency: scan bins from high to low and
    // keep the loudest sample above a threshold of 20, looking at most
    // 500 more bins after the first hit.
    let highestFreqHalfHz = 0;
    {
      /**
       * Highest frequency in 0.5Hz
       */
      let highestFreq = NaN;
      let highestFreqAmp = NaN;
      let remSteps = NaN;
      for (let i = this.audioBuffer.length - 1; i >= 0; i--) {
        const sample = this.audioBuffer[i];
        if (sample > 20 && (isNaN(highestFreqAmp) || sample > highestFreqAmp)) {
          highestFreq = i;
          highestFreqAmp = sample;
          if (isNaN(remSteps))
            remSteps = 500;
        }
        if (!isNaN(remSteps)) {
          if (remSteps-- < 0)
            break;
        }
      }
      if (!isNaN(highestFreq)) {
        // Mark the detected bin with a vertical green line.
        ctx.beginPath();
        ctx.moveTo(highestFreq * step, 0);
        ctx.lineTo(highestFreq * step, scale * 255);
        ctx.strokeStyle = "green";
        ctx.stroke();
        highestFreqHalfHz = highestFreq;
      }
    }
    // Draw Audio
    this.audioAnalyser.getByteTimeDomainData(this.audioBuffer);
    {
      const bufferSize = this.audioBuffer.length;
      const offsetY = -this.canvas.height * 0.5;
      // I don't know what I am doing here:
      // NOTE(review): this is the buggy trigger offset. currentTime is in
      // SECONDS and getByteTimeDomainData yields one byte per audio
      // sample, so the author's later fix computes
      // Math.round((currentTime % (1 / freq)) * sampleRate) instead of
      // mixing milliseconds with a period in seconds as below.
      const offsetX = highestFreqHalfHz == 0
        ? 0
        : bufferSize -
          Math.round(((this.context.currentTime * 1000) % (1 / 440)) % bufferSize);
      // Draw the audio graph with the given offset
      ctx.beginPath();
      ctx.moveTo(-step, this.audioBuffer[0] * scale + offsetY);
      for (let i = 0; i < bufferSize; i++) {
        const index = (offsetX + i) % bufferSize;
        const sample = this.audioBuffer[index];
        ctx.lineTo(i * step, scale * sample + offsetY);
      }
      ctx.strokeStyle = "white";
      ctx.stroke();
    }
  }
}
window.addEventListener("load", () => {
const app = new App(document.getElementById("visualizer"), document.getElementById("options"));
requestAnimationFrame(draw);
function draw() {
requestAnimationFrame(draw);
app.drawVisualizer();
}
});
/* Page chrome: black backdrop, full-viewport body with no scrollbars. */
html {
  background: black;
}
body {
  width: 100vw;
  height: 100vh;
  margin: 0;
  overflow: hidden;
}
/* The canvas is appended inside #visualizer, pinned to the viewport. */
#visualizer {
  position: fixed;
  inset: 0;
}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Equalizer</title>
</head>
<body>
<!-- The script injects a <canvas> into #visualizer; #options is reserved for UI. -->
<div id="visualizer"></div>
<div id="options"></div>
</body>
</html>
The above snippet was generated from TypeScript. You can find the source here. If it worked as intended, the oscillating graph (bottom) would not be moving.
I was able to solve this problem thanks to Raymond Toy's comment and my maths teacher (thank you Mr. Klein). The solution was Math.round((this.context.currentTime % iv) * sampleRate) where iv is the interval of the frequency (1/Hz). The wave is not perfectly centered. The FFT approximation is not very accurate though. In the following example I forced the detected frequency to be the specified one.
"use strict";
// Oscillator instead of mic for debugging
const USE_OSCILLATOR = true;
const OSCILLATOR_HZ = 1000;
// Compatibility
if (!window.AudioContext)
window.AudioContext = window.webkitAudioContext;
if (!navigator.getUserMedia)
navigator.getUserMedia =
navigator.mozGetUserMedia ||
navigator.webkitGetUserMedia ||
navigator.msGetUserMedia;
// Main
// Realtime audio visualizer: routes microphone input (or a test oscillator)
// through a bandpass filter and gain stage into an AnalyserNode, then draws
// the FFT spectrum and the time-domain waveform on a full-window 2D canvas.
class App {
// visualizerElement: container element that receives the created canvas.
// optionsElement: stored on the instance; not read anywhere in this class.
constructor(visualizerElement, optionsElement) {
this.visualizerElement = visualizerElement;
this.optionsElement = optionsElement;
// HTML elements
this.canvas = document.createElement("canvas");
// Context
this.context = new AudioContext({
// Low latency
latencyHint: "interactive",
});
this.canvasCtx = this.canvas.getContext("2d", {
// Low latency
desynchronized: true,
alpha: false,
});
// Audio nodes
this.audioAnalyser = this.context.createAnalyser();
this.audioBuffer = new Uint8Array(0);
this.audioInputStream = null;
this.audioInputNode = null;
if (this.canvasCtx === null)
throw new Error("2D rendering Context not supported by browser.");
this.updateCanvasSize();
window.addEventListener("resize", () => this.updateCanvasSize());
this.drawVisualizer();
this.visualizerElement.appendChild(this.canvas);
this.audioAnalyser.fftSize = 2048;
this.audioAnalyser.maxDecibels = -10;
// Twice frequencyBinCount so the same buffer can also hold the
// fftSize-long time-domain data used below in drawVisualizer().
this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount * 2);
// Narrow bandpass centered at 900 Hz (high Q), then a 5x gain boost.
this.audioFilter = this.context.createBiquadFilter();
this.audioFilter.type = "bandpass";
this.audioFilter.frequency.value = 900;
this.audioFilter.Q.value = 20;
this.audioAmplifier = this.context.createGain();
this.audioAmplifier.gain.value = 5;
// Signal chain: source -> bandpass filter -> gain -> analyser.
this.audioFilter.connect(this.audioAmplifier);
this.audioAmplifier.connect(this.audioAnalyser);
if (USE_OSCILLATOR) {
// Test mode: feed a fixed sine tone instead of the microphone.
let oscillator = this.context.createOscillator();
oscillator.type = "sine";
oscillator.frequency.setValueAtTime(OSCILLATOR_HZ, this.context.currentTime);
oscillator.connect(this.audioFilter);
oscillator.start();
}
else {
// NOTE(review): navigator.getUserMedia is the deprecated callback API;
// presumably the vendor-prefix shim above this class supplies it —
// confirm, or migrate to navigator.mediaDevices.getUserMedia.
navigator.getUserMedia({ audio: true }, (stream) => {
this.audioInputStream = stream;
this.audioInputNode = this.context.createMediaStreamSource(stream);
this.audioInputNode.channelCountMode = "explicit";
this.audioInputNode.channelCount = 1;
// Mic mode shrinks the buffer to frequencyBinCount entries.
this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
this.audioInputNode.connect(this.audioFilter);
}, (err) => console.error(err));
}
}
// Resize the canvas to fill the window and flip the y axis so positive y
// points up, with the origin vertically centered on the canvas.
updateCanvasSize() {
var _a;
this.canvas.width = window.innerWidth;
this.canvas.height = window.innerHeight;
(_a = this.canvasCtx) === null || _a === void 0 ? void 0 : _a.setTransform(1, 0, 0, -1, 0, this.canvas.height * 0.5);
}
// Draw one frame: fade the previous frame, plot the FFT spectrum, mark the
// dominant frequency, then plot the waveform phase-locked to that frequency.
drawVisualizer() {
if (this.canvasCtx === null)
return;
const ctx = this.canvasCtx;
// Half-transparent black fill leaves a motion trail of older frames.
ctx.globalAlpha = 0.5;
ctx.fillStyle = "black";
ctx.fillRect(0, -0.5 * this.canvas.height, this.canvas.width, this.canvas.height);
ctx.globalAlpha = 1;
// Draw FFT
this.audioAnalyser.getByteFrequencyData(this.audioBuffer);
const scale = this.canvas.height / (2 * 255);
const { frequencyBinCount } = this.audioAnalyser;
const { sampleRate } = this.context;
{
const step = this.canvas.width / frequencyBinCount;
ctx.beginPath();
ctx.moveTo(-step, this.audioBuffer[0] * scale);
for (let index = 0; index < frequencyBinCount; index++) {
ctx.lineTo(index * step, scale * this.audioBuffer[index]);
}
ctx.strokeStyle = "white";
ctx.stroke();
}
// Get the highest dominant frequency
const step = this.canvas.width / frequencyBinCount;
let highestFreqHz = 0;
{
/**
* Highest frequency index in the buffer
*/
let highestFreqIndex = NaN;
let highestFreqAmp = NaN;
let remSteps = NaN;
// Scan bins from high to low, keeping the loudest bin whose amplitude
// exceeds the threshold of 30.
for (let i = frequencyBinCount - 1; i >= 0; i--) {
const sample = this.audioBuffer[i];
if (sample > 30) {
if (isNaN(highestFreqAmp)) {
highestFreqIndex = i;
highestFreqAmp = sample;
}
else {
if (sample > highestFreqAmp) {
highestFreqIndex = i;
highestFreqAmp = sample;
}
}
//if (isNaN(remSteps)) remSteps = 100;
}
// remSteps stays NaN unless the line above is re-enabled, so the scan
// currently always covers every bin.
if (!isNaN(remSteps)) {
if (remSteps-- < 0)
break;
}
}
if (!isNaN(highestFreqIndex)) {
// Force exact value: (not necessary)
// NOTE(review): this overwrites the detected bin with the oscillator's
// known frequency, so the detection above is effectively bypassed here.
highestFreqIndex =
(OSCILLATOR_HZ * (2 * frequencyBinCount)) / sampleRate;
ctx.beginPath();
ctx.moveTo(highestFreqIndex * step, 0);
ctx.lineTo(highestFreqIndex * step, scale * 255);
ctx.strokeStyle = "green";
ctx.stroke();
highestFreqHz =
(highestFreqIndex * sampleRate) / (2 * frequencyBinCount);
window.HZ = highestFreqHz;
}
}
// Draw Audio
this.audioAnalyser.getByteTimeDomainData(this.audioBuffer);
{
// iv: period (seconds) of the dominant frequency, used to phase-lock the
// waveform so a steady tone appears stationary on screen.
const iv = highestFreqHz == 0 ? 0 : 1 / highestFreqHz;
const bufferSize = this.audioBuffer.length;
const offsetY = -this.canvas.height / 2.4;
const startIndex = Math.round(iv * sampleRate);
const step = this.canvas.width / (this.audioBuffer.length - startIndex);
const scale = this.canvas.height / (3 * 255);
const offsetX = highestFreqHz == 0
? 0
: Math.round((this.context.currentTime % iv) * sampleRate) %
bufferSize;
// Draw the audio graph with the given offset
ctx.beginPath();
ctx.moveTo(-step, this.audioBuffer[startIndex - offsetX] * scale + offsetY);
for (let i = startIndex; i < bufferSize; i += 4) {
const index = (i - offsetX) % bufferSize;
const sample = this.audioBuffer[index];
ctx.lineTo((i - startIndex) * step, scale * sample + offsetY);
}
ctx.strokeStyle = "white";
ctx.stroke();
}
}
}
// Boot the app once the page has loaded, then redraw the visualizer on
// every animation frame.
window.addEventListener("load", () => {
  const app = new App(document.getElementById("visualizer"), document.getElementById("options"));
  const loop = () => {
    requestAnimationFrame(loop);
    app.drawVisualizer();
  };
  requestAnimationFrame(loop);
});
/* Full-screen black page with no scrollbars; the visualizer container is
   fixed and stretched over the whole viewport. */
html {
background: black;
}
body {
width: 100vw;
height: 100vh;
margin: 0;
overflow: hidden;
}
#visualizer {
position: fixed;
inset: 0;
}
<!DOCTYPE html>
<!-- Minimal page shell: the visualizer canvas is injected into #visualizer
     by the script; #options is an empty hook for UI controls. -->
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Equalizer</title>
</head>
<body>
<div id="visualizer"></div>
<div id="options"></div>
</body>
</html>

Trying to simulate a 3D effect via Three.js

I'm trying to achieve something akin to this amazing effect : https://www.cobosrl.co/
Here's what I have so far : https://codepen.io/routsou/pen/ZEGWJgR?editors=0010
/*--------------------
Setup
--------------------*/
console.clear();
const canvas = document.querySelector('#bubble');
//wobble
let mouseDown = false;
let howMuch = 0;
let howMuchLimit = 0.25;
//ripple
let rippleAmount = 0;
let rippleRatio = 5;
let step = 0;
let sphereVerticesArray = [];
let sphereVerticesNormArray = [];
//raycaster
let raycaster;
let INTERSECTED = null;
// Fix: declare the camera here — assigning it inside setup() without a
// declaration created an implicit global (and would throw in strict mode).
let camera;
let width = canvas.offsetWidth,
height = canvas.offsetHeight;
const renderer = new THREE.WebGLRenderer({
canvas: canvas,
antialias: true,
alpha: true
});
const scene = new THREE.Scene();
// Configure renderer options and fog, and build the camera and raycaster.
const setup = () => {
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize(width, height);
renderer.setClearColor(0xebebeb, 0);
renderer.shadowMap.enabled = true;
renderer.shadowMapSoft = true;
scene.fog = new THREE.Fog(0x000000, 10, 950);
const aspectRatio = width / height;
const fieldOfView = 100;
const nearPlane = 0.1;
const farPlane = 10000;
camera = new THREE.PerspectiveCamera(
fieldOfView,
aspectRatio,
nearPlane,
farPlane
);
raycaster = new THREE.Raycaster();
camera.position.x = 0;
camera.position.y = 0;
camera.position.z = 300;
}
setup();
/*--------------------
Lights
--------------------*/
// Fix: the declaration misspelled "hemisphereLight" and omitted "light3",
// so both were being created as implicit globals inside createLights().
let hemisphereLight, shadowLight, light2, light3;
// One hemisphere light plus three directional lights; only shadowLight
// casts shadows (with a wide ortho shadow camera and 4096px shadow map).
const createLights = () => {
hemisphereLight = new THREE.HemisphereLight(0xffffff,0x000000, .5)
shadowLight = new THREE.DirectionalLight(0x666666, .4);
shadowLight.position.set(0, 450, 350);
shadowLight.castShadow = true;
shadowLight.shadow.camera.left = -650;
shadowLight.shadow.camera.right = 650;
shadowLight.shadow.camera.top = 650;
shadowLight.shadow.camera.bottom = -650;
shadowLight.shadow.camera.near = 1;
shadowLight.shadow.camera.far = 1000;
shadowLight.shadow.mapSize.width = 4096;
shadowLight.shadow.mapSize.height = 4096;
light2 = new THREE.DirectionalLight(0x666666, .25);
light2.position.set(-600, 350, 350);
light3 = new THREE.DirectionalLight(0x666666, .15);
light3.position.set(0, -250, 300);
scene.add(hemisphereLight);
scene.add(shadowLight);
scene.add(light2);
scene.add(light3);
}
createLights();
/*--------------------
Bubble
--------------------*/
// Sphere resolution: fewer segments on small (mobile-width) screens.
const vertex = width > 575 ? 80 : 40;
const bubbleGeometry = new THREE.SphereGeometry( 150, vertex, vertex );
let bubble;
// Build the bubble mesh and cache every vertex's rest position and unit
// direction for the wobble/ripple deformations applied each frame.
const createBubble = () => {
for(let i = 0; i < bubbleGeometry.vertices.length; i++) {
let vector = bubbleGeometry.vertices[i];
vector.original = vector.clone();
}
const bubbleMaterial = new THREE.MeshStandardMaterial({
emissive: 0x91176b,
emissiveIntensity: 0.85,
roughness: 0.55,
metalness: 0.51,
side: THREE.FrontSide,
});
// save points for later calculation
for (var i = 0; i < bubbleGeometry.vertices.length; i += 1) {
var vertex = bubbleGeometry.vertices[i];
var vec = new THREE.Vector3(vertex.x, vertex.y, vertex.z);
sphereVerticesArray.push(vec);
var mag = vec.x * vec.x + vec.y * vec.y + vec.z * vec.z;
mag = Math.sqrt(mag);
var norm = new THREE.Vector3(vertex.x / mag, vertex.y / mag, vertex.z / mag);
sphereVerticesNormArray.push(norm);
}
bubble = new THREE.Mesh(bubbleGeometry, bubbleMaterial);
bubble.castShadow = true;
bubble.receiveShadow = false;
// NOTE(review): rotation is in radians, so -90 is not -90 degrees —
// confirm whether THREE.Math.degToRad(-90) was intended.
bubble.rotation.y = -90;
scene.add(bubble);
}
createBubble();
/*--------------------
Plane
--------------------*/
// Invisible ground plane that only receives the bubble's shadow
// (ShadowMaterial renders nothing but the shadow itself).
const createPlane = () => {
const planeGeometry = new THREE.PlaneBufferGeometry( 2000, 2000 );
const planeMaterial = new THREE.ShadowMaterial({
opacity: 0.15
});
const plane = new THREE.Mesh( planeGeometry, planeMaterial );
plane.position.y = -150;
plane.position.x = 0;
plane.position.z = 0;
// Rotate the plane to horizontal (-90 degrees around x).
plane.rotation.x = Math.PI / 180 * -90;
plane.receiveShadow = true;
scene.add(plane);
}
createPlane();
/*--------------------
Map
--------------------*/
// Linearly re-map `num` from the range [in_min, in_max] to [out_min, out_max].
const map = (num, in_min, in_max, out_min, out_max) => {
  const scaled = (num - in_min) * (out_max - out_min) / (in_max - in_min);
  return scaled + out_min;
}
/*--------------------
Distance
--------------------*/
// Euclidean distance between two 2D points (only .x and .y are read).
const distance = (a, b) => {
  const deltaX = a.x - b.x;
  const deltaY = a.y - b.y;
  return Math.sqrt(deltaX * deltaX + deltaY * deltaY);
}
/*--------------------
Mouse
--------------------*/
let mouse = new THREE.Vector2(0, 0);
// Ease the pointer's normalized device coordinates into `mouse`, then
// raycast into the scene to toggle the hover (INTERSECTED) state.
const onMouseMove = (e) => {
TweenMax.to(mouse, 0.8, {
x : ( e.clientX / window.innerWidth ) * 2 - 1,
y: - ( e.clientY / window.innerHeight ) * 2 + 1,
ease: Power2.easeOut
});
raycaster.setFromCamera( mouse, camera );
// NOTE(review): this intersects ALL scene children (lights, shadow plane,
// bubble), so non-bubble hits can trigger the hover state — see the answer
// below, which restricts the test to the bubble.
let intersects = raycaster.intersectObjects( scene.children );
try{
if ( intersects.length > 0 ) {
if ( INTERSECTED != intersects[ 0 ].object ) {
// New object under the pointer: restore the old one, darken the new one.
if ( INTERSECTED ) INTERSECTED.material.emissive.setHex( INTERSECTED.currentHex );
INTERSECTED = intersects[ 0 ].object;
INTERSECTED.currentHex = INTERSECTED.material.emissive.getHex();
INTERSECTED.material.emissive.setHex( 0x000000 );
document.body.style.cursor = 'pointer';
}
} else {
if ( INTERSECTED ) INTERSECTED.material.emissive.setHex( INTERSECTED.currentHex );
INTERSECTED = null;
document.body.style.cursor = 'auto';
}
}catch(e){
// NOTE(review): errors are silently swallowed here — presumably guarding
// against hit objects whose material lacks `emissive`; confirm.
}
};
// NOTE(review): touchmove events carry coordinates on e.touches, not
// e.clientX/e.clientY, so this handler likely no-ops for touch — confirm.
['mousemove', 'touchmove'].forEach(event => {
window.addEventListener(event, onMouseMove);
});
/*--------------------
Spring
--------------------*/
let spring = {
scale: 1
};
// Track pointer press state via the shared mouseDown flag.
const clicking = {
  down: () => {
    mouseDown = true;
  },
  up: () => {
    mouseDown = false;
  }
};
for (const eventName of ['mousedown', 'touchstart']) {
  window.addEventListener(eventName, clicking.down);
}
for (const eventName of ['mouseup', 'touchend']) {
  window.addEventListener(eventName, clicking.up);
}
/*--------------------
Resize
--------------------*/
// Recompute canvas size, camera aspect and the mouse-distance normalizer
// after the window stops resizing.
const onResize = () => {
canvas.style.width = '';
canvas.style.height = '';
width = canvas.offsetWidth;
height = canvas.offsetHeight;
camera.aspect = width / height;
camera.updateProjectionMatrix();
maxDist = distance(mouse, {x: width / 2, y: height / 2});
renderer.setSize(width, height);
}
let resizeTm;
// Debounce: only run onResize 200 ms after the last resize event.
window.addEventListener('resize', function(){
resizeTm = clearTimeout(resizeTm);
resizeTm = setTimeout(onResize, 200);
});
/*--------------------
Noise
--------------------*/
// dist is re-assigned to a number below despite starting as a Vector2.
let dist = new THREE.Vector2(0, 0);
let maxDist = distance(mouse, {x: width / 2, y: height / 2});
// Wobble pass: reset each vertex to its rest position, then scale it by a
// simplex-noise factor; `howMuch` (raised while the pointer is held down)
// amplifies the deformation.
const updateVertices = (time) => {
dist = distance(mouse, {x: width / 2, y: height / 2});
dist /= maxDist;
dist = map(dist, 1, 0, 0, 1);
for(let i = 0; i < bubbleGeometry.vertices.length; i++) {
let vector = bubbleGeometry.vertices[i];
vector.copy(vector.original);
let perlin = noise.simplex3(
(vector.x * 0.006) + (time * 0.0005),
(vector.y * 0.006) + (time * 0.0005),
(vector.z * 0.006)
);
let ratio = ((perlin * 0.3 * (howMuch + 0.1)) + 0.9);
vector.multiplyScalar(ratio);
}
bubbleGeometry.verticesNeedUpdate = true;
}
/*--------------------
Animate
--------------------*/
// Main loop: wobble the vertices, render, then ease the wobble strength
// (while the pointer is down) and the ripple strength (while hovering)
// toward their limits; finally apply the ripple displacement.
const render = (a) => {
step +=1;
requestAnimationFrame(render);
//bubble.scale.set(spring.scale, spring.scale, spring.scale);
updateVertices(a);
renderer.clear();
renderer.render(scene, camera);
//Activate on mouse down
if(mouseDown && howMuch < howMuchLimit)
howMuch += 0.01;
else if (howMuch > 0)
howMuch -= 0.01;
if(INTERSECTED){
if(rippleAmount < 10)
rippleAmount += 0.05;
}else if(rippleAmount > 0)
rippleAmount -= 0.05;
doRipple();
}
requestAnimationFrame(render);
renderer.render(scene, camera);
/*--------------------
Helpers
--------------------*/
// Sample simplex noise at a Point. Despite the "fbm" name, only a single
// octave is sampled — no fractal accumulation happens here.
function fbm(p) {
var result = noise.simplex3(p._x, p._y, p._z);
return result;
}
// Component-wise sum of an array of Point objects; returns a new Point.
function addPoint(arr) {
  const total = new Point(0, 0, 0);
  for (const p of arr) {
    total._x += p._x;
    total._y += p._y;
    total._z += p._z;
  }
  return total;
}
// Minimal mutable 3D point used by the noise helpers; all components
// default to zero.
function Point(x = 0, y = 0, z = 0) {
  this._x = x;
  this._y = y;
  this._z = z;
}
// Domain-warped noise: derive an offset vector q from three noise samples
// around p, then sample the noise again at p shifted by q/2.
function ripple(p) {
var q = new Point(fbm(addPoint([p, new Point(0, 0, 0)])),
fbm(addPoint([p, new Point(0, 1, 0)])),
fbm(addPoint([p, new Point(0, 0, 1)])));
return fbm(addPoint([p, new Point(0.5 * q._x, 0.5 * q._y, 0.5 * q._z)]));
}
// Ripple pass: push every vertex away from its rest position along its
// cached unit direction by a noise value scaled by rippleAmount, then
// refresh the face/vertex normals so lighting follows the deformation.
function doRipple(){
//ripple
for (var i = 0; i < bubbleGeometry.vertices.length; i += 1) {
var vertex = bubbleGeometry.vertices[i];
// var value = pn.noise((vertex.x + step)/ 10, vertex.y / 10, vertex.z / 10);
// Bug fix: a misplaced parenthesis passed vertex.y and vertex.z as extra
// arguments to ripple() instead of to the Point constructor, so the noise
// only ever varied along the x axis.
var value = ripple(new Point((vertex.x + step) / 100.0, vertex.y / 100.0, vertex.z / 100.0));
vertex.x = sphereVerticesArray[i].x + sphereVerticesNormArray[i].x * value * rippleAmount;
vertex.y = sphereVerticesArray[i].y + sphereVerticesNormArray[i].y * value * rippleAmount;
vertex.z = sphereVerticesArray[i].z + sphereVerticesNormArray[i].z * value * rippleAmount;
}
bubbleGeometry.computeFaceNormals();
bubbleGeometry.computeVertexNormals();
bubbleGeometry.verticesNeedUpdate = true;
bubbleGeometry.normalsNeedUpdate = true;
}
Any help would be appreciated — particularly with making the mouse pointer "sculpt" the geometry, and with making the waves look more natural and originate from the pointer.
Thank you very much in advance.
I've investigated and found you're intersecting with all children (6) in the scene, including the bubble shadow and the lights. The shadow seems to also intersect with the mouse triggering a false contact.
About "sculpting the geometry" I noticed you hardcode the ripple effect from one specific point of the bubble during initial construction and that's why the sculpting effect is always from that same point. This is my recommendation:
Remove the hard-coded sphereVerticesArray and sphereVerticesNormArray.
After computing the intersection with the mouse, find out the face of the bubble getting hit: intersections[0].point provides the point of intersection, in world coordinates. Use this to find out the face of contact.
During ripple effect use the normal of the contact face as starting point and orientation of the ripple.
This is the code to fix the shadow intersection issue including some comments:
/*--------------------
Setup
--------------------*/
console.clear();
const canvas = document.querySelector('#bubble');
//wobble
let mouseDown = false;
let howMuch = 0;
let howMuchLimit = 0.25;
//ripple
let rippleAmount = 0;
let rippleRatio = 5;
let step = 0;
let sphereVerticesArray = [];
let sphereVerticesNormArray = [];
//raycaster
let raycaster;
let isIntersectingWithBubble = false;
// Fix: declare the camera here — assigning it inside setup() without a
// declaration created an implicit global (and would throw in strict mode).
let camera;
let width = canvas.offsetWidth,
height = canvas.offsetHeight;
const renderer = new THREE.WebGLRenderer({
canvas: canvas,
antialias: true,
alpha: true
});
const scene = new THREE.Scene();
// Configure renderer options and fog, and build the camera and raycaster.
const setup = () => {
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize(width, height);
renderer.setClearColor(0xebebeb, 0);
renderer.shadowMap.enabled = true;
renderer.shadowMapSoft = true;
scene.fog = new THREE.Fog(0x000000, 10, 950);
const aspectRatio = width / height;
const fieldOfView = 100;
const nearPlane = 0.1;
const farPlane = 10000;
camera = new THREE.PerspectiveCamera(
fieldOfView,
aspectRatio,
nearPlane,
farPlane
);
raycaster = new THREE.Raycaster();
camera.position.x = 0;
camera.position.y = 0;
camera.position.z = 300;
}
setup();
/*--------------------
Lights
--------------------*/
// Fix: the declaration misspelled "hemisphereLight" and omitted "light3",
// so both were being created as implicit globals inside createLights().
let hemisphereLight, shadowLight, light2, light3;
// One hemisphere light plus three directional lights; only shadowLight
// casts shadows (with a wide ortho shadow camera and 4096px shadow map).
const createLights = () => {
hemisphereLight = new THREE.HemisphereLight(0xffffff,0x000000, .5)
shadowLight = new THREE.DirectionalLight(0x666666, .4);
shadowLight.position.set(0, 450, 350);
shadowLight.castShadow = true;
shadowLight.shadow.camera.left = -650;
shadowLight.shadow.camera.right = 650;
shadowLight.shadow.camera.top = 650;
shadowLight.shadow.camera.bottom = -650;
shadowLight.shadow.camera.near = 1;
shadowLight.shadow.camera.far = 1000;
shadowLight.shadow.mapSize.width = 4096;
shadowLight.shadow.mapSize.height = 4096;
light2 = new THREE.DirectionalLight(0x666666, .25);
light2.position.set(-600, 350, 350);
light3 = new THREE.DirectionalLight(0x666666, .15);
light3.position.set(0, -250, 300);
scene.add(hemisphereLight);
scene.add(shadowLight);
scene.add(light2);
scene.add(light3);
}
createLights();
/*--------------------
Bubble
--------------------*/
// Sphere resolution: fewer segments on small (mobile-width) screens.
const vertex = width > 575 ? 80 : 40;
const bubbleGeometry = new THREE.SphereGeometry( 150, vertex, vertex );
// Hover colors: the emissive tint swaps to the contact color on hover.
const bubbleEmissive = 0x91176b;
const bubbleEmissiveOnContact = 0x000000;
// Build the bubble mesh, cache vertex rest positions / unit directions for
// the deformations, and return the mesh for direct raycasting.
const createBubble = () => {
for(let i = 0; i < bubbleGeometry.vertices.length; i++) {
let vector = bubbleGeometry.vertices[i];
vector.original = vector.clone();
}
const bubbleMaterial = new THREE.MeshStandardMaterial({
emissive: bubbleEmissive,
emissiveIntensity: 0.85,
roughness: 0.55,
metalness: 0.51,
side: THREE.FrontSide,
});
// save points for later calculation
for (var i = 0; i < bubbleGeometry.vertices.length; i += 1) {
var vertex = bubbleGeometry.vertices[i];
var vec = new THREE.Vector3(vertex.x, vertex.y, vertex.z);
sphereVerticesArray.push(vec);
var mag = vec.x * vec.x + vec.y * vec.y + vec.z * vec.z;
mag = Math.sqrt(mag);
var norm = new THREE.Vector3(vertex.x / mag, vertex.y / mag, vertex.z / mag);
sphereVerticesNormArray.push(norm);
}
const _bubble = new THREE.Mesh(bubbleGeometry, bubbleMaterial);
_bubble.castShadow = true;
_bubble.receiveShadow = false;
// NOTE(review): rotation is in radians, so -90 is not -90 degrees —
// confirm whether THREE.Math.degToRad(-90) was intended.
_bubble.rotation.y = -90;
scene.add(_bubble);
return _bubble;
}
const bubble = createBubble();
/*--------------------
Plane
--------------------*/
// Invisible ground plane that only receives the bubble's shadow
// (ShadowMaterial renders nothing but the shadow itself).
const createPlane = () => {
const planeGeometry = new THREE.PlaneBufferGeometry( 2000, 2000 );
const planeMaterial = new THREE.ShadowMaterial({
opacity: 0.15
});
const plane = new THREE.Mesh( planeGeometry, planeMaterial );
plane.position.y = -150;
plane.position.x = 0;
plane.position.z = 0;
// Rotate the plane to horizontal (-90 degrees around x).
plane.rotation.x = Math.PI / 180 * -90;
plane.receiveShadow = true;
scene.add(plane);
}
createPlane();
/*--------------------
Map
--------------------*/
// Linearly re-map `num` from the range [in_min, in_max] to [out_min, out_max].
const map = (num, in_min, in_max, out_min, out_max) => {
  const scaled = (num - in_min) * (out_max - out_min) / (in_max - in_min);
  return scaled + out_min;
}
/*--------------------
Distance
--------------------*/
// Euclidean distance between two 2D points (only .x and .y are read).
const distance = (a, b) => {
  const deltaX = a.x - b.x;
  const deltaY = a.y - b.y;
  return Math.sqrt(deltaX * deltaX + deltaY * deltaY);
}
/*--------------------
Mouse
--------------------*/
let mouse = new THREE.Vector2(0, 0);
// Ease the pointer's normalized device coordinates into `mouse`, then
// raycast against ONLY the bubble mesh — this is the answer's fix for the
// false hover triggered by hitting the shadow plane or lights.
const onMouseMove = (e) => {
TweenMax.to(mouse, 0.8, {
x : ( e.clientX / window.innerWidth ) * 2 - 1,
y: - ( e.clientY / window.innerHeight ) * 2 + 1,
ease: Power2.easeOut
});
raycaster.setFromCamera( mouse, camera );
isIntersectingWithBubble = raycaster.intersectObject( bubble ).length > 0; // we are only interested in intersections with the bubble object
try {
if (isIntersectingWithBubble) {
// is intersecting: change color, change pointer, change point of contact
bubble.material.emissive.setHex(bubbleEmissiveOnContact);
document.body.style.cursor = 'pointer';
} else {
// is not intersecting: restore color, restore pointer, remove point of contact
bubble.material.emissive.setHex(bubbleEmissive);
document.body.style.cursor = 'auto';
}
} catch (e) {
// NOTE(review): errors are silently swallowed here; with the single-object
// raycast above, this guard may no longer be needed — confirm.
}
};
['mousemove', 'touchmove'].forEach(event => {
window.addEventListener(event, onMouseMove);
});
/*--------------------
Spring
--------------------*/
let spring = {
scale: 1
};
// Track pointer press state via the shared mouseDown flag.
const clicking = {
  down: () => {
    mouseDown = true;
  },
  up: () => {
    mouseDown = false;
  }
};
for (const eventName of ['mousedown', 'touchstart']) {
  window.addEventListener(eventName, clicking.down);
}
for (const eventName of ['mouseup', 'touchend']) {
  window.addEventListener(eventName, clicking.up);
}
/*--------------------
Resize
--------------------*/
// Recompute canvas size, camera aspect and the mouse-distance normalizer
// after the window stops resizing.
const onResize = () => {
canvas.style.width = '';
canvas.style.height = '';
width = canvas.offsetWidth;
height = canvas.offsetHeight;
camera.aspect = width / height;
camera.updateProjectionMatrix();
maxDist = distance(mouse, {x: width / 2, y: height / 2});
renderer.setSize(width, height);
}
let resizeTm;
// Debounce: only run onResize 200 ms after the last resize event.
window.addEventListener('resize', function(){
resizeTm = clearTimeout(resizeTm);
resizeTm = setTimeout(onResize, 200);
});
/*--------------------
Noise
--------------------*/
// dist is re-assigned to a number below despite starting as a Vector2.
let dist = new THREE.Vector2(0, 0);
let maxDist = distance(mouse, {x: width / 2, y: height / 2});
// Wobble pass: reset each vertex to its rest position, then scale it by a
// simplex-noise factor; `howMuch` (raised while the pointer is held down)
// amplifies the deformation.
const updateVertices = (time) => {
dist = distance(mouse, {x: width / 2, y: height / 2});
dist /= maxDist;
dist = map(dist, 1, 0, 0, 1);
for(let i = 0; i < bubbleGeometry.vertices.length; i++) {
let vector = bubbleGeometry.vertices[i];
vector.copy(vector.original);
let perlin = noise.simplex3(
(vector.x * 0.006) + (time * 0.0005),
(vector.y * 0.006) + (time * 0.0005),
(vector.z * 0.006)
);
let ratio = ((perlin * 0.3 * (howMuch + 0.1)) + 0.9);
vector.multiplyScalar(ratio);
}
bubbleGeometry.verticesNeedUpdate = true;
}
/*--------------------
Animate
--------------------*/
// Main loop: wobble the vertices, render, then ease the wobble strength
// (while the pointer is down) and the ripple strength (while hovering the
// bubble) toward their limits; finally apply the ripple displacement.
const render = (a) => {
step +=1;
requestAnimationFrame(render);
//bubble.scale.set(spring.scale, spring.scale, spring.scale);
updateVertices(a);
renderer.clear();
renderer.render(scene, camera);
//Activate on mouse down
if(mouseDown && howMuch < howMuchLimit)
howMuch += 0.01;
else if (howMuch > 0)
howMuch -= 0.01;
if(isIntersectingWithBubble){
if(rippleAmount < 10)
rippleAmount += 0.05;
}else if(rippleAmount > 0)
rippleAmount -= 0.05;
doRipple();
}
requestAnimationFrame(render);
renderer.render(scene, camera);
/*--------------------
Helpers
--------------------*/
// Sample simplex noise at a Point. Despite the "fbm" name, only a single
// octave is sampled — no fractal accumulation happens here.
function fbm(p) {
var result = noise.simplex3(p._x, p._y, p._z);
return result;
}
// Component-wise sum of an array of Point objects; returns a new Point.
function addPoint(arr) {
  const total = new Point(0, 0, 0);
  for (const p of arr) {
    total._x += p._x;
    total._y += p._y;
    total._z += p._z;
  }
  return total;
}
// Minimal mutable 3D point used by the noise helpers; all components
// default to zero.
function Point(x = 0, y = 0, z = 0) {
  this._x = x;
  this._y = y;
  this._z = z;
}
// Domain-warped noise: derive an offset vector q from three noise samples
// around p, then sample the noise again at p shifted by q/2.
function ripple(p) {
var q = new Point(fbm(addPoint([p, new Point(0, 0, 0)])),
fbm(addPoint([p, new Point(0, 1, 0)])),
fbm(addPoint([p, new Point(0, 0, 1)])));
return fbm(addPoint([p, new Point(0.5 * q._x, 0.5 * q._y, 0.5 * q._z)]));
}
// Ripple pass: push every vertex away from its rest position along its
// cached unit direction by a noise value scaled by rippleAmount, then
// refresh the face/vertex normals so lighting follows the deformation.
function doRipple(){
//ripple
for (var i = 0; i < bubbleGeometry.vertices.length; i += 1) {
var vertex = bubbleGeometry.vertices[i];
// var value = pn.noise((vertex.x + step)/ 10, vertex.y / 10, vertex.z / 10);
// Bug fix: a misplaced parenthesis passed vertex.y and vertex.z as extra
// arguments to ripple() instead of to the Point constructor, so the noise
// only ever varied along the x axis.
var value = ripple(new Point((vertex.x + step) / 100.0, vertex.y / 100.0, vertex.z / 100.0));
vertex.x = sphereVerticesArray[i].x + sphereVerticesNormArray[i].x * value * rippleAmount;
vertex.y = sphereVerticesArray[i].y + sphereVerticesNormArray[i].y * value * rippleAmount;
vertex.z = sphereVerticesArray[i].z + sphereVerticesNormArray[i].z * value * rippleAmount;
}
bubbleGeometry.computeFaceNormals();
bubbleGeometry.computeVertexNormals();
bubbleGeometry.verticesNeedUpdate = true;
bubbleGeometry.normalsNeedUpdate = true;
}

Get Touch Event Relative To Under-Sized Frame Buffer Object

I coded a fluid simulation using shaders (I used THREE.js) for my webpage. I wanted it to run fast even on mobile, so I decided to simulate at a lower resolution (4 times smaller than the render target). I managed to get it working for mouse events, but I haven't been able to figure out how to properly scale touch events so that they match the real touch positions.
// Touch handler for the fluid sim: tries to convert a touch position into
// the low-resolution simulation buffer's coordinate space.
function handleMove(evt) {
evt.preventDefault();
var touches = evt.targetTouches;
var x = 0, y = 0;
if (BufferBUniforms.iMouse.value.z === 1) {
var element = document.getElementById("container").getBoundingClientRect();
var bodyRect = document.body.getBoundingClientRect();
// Offset of the container relative to the document body.
var h = (element.top - bodyRect.top);
var w = (element.left - bodyRect.left);
// One way I tried.
x = ( touches[0].pageX - w ) / scaleMax;
y = height - ( touches[0].pageY - h ) / scaleMax;
// Another way I tried.
// NOTE(review): this second attempt unconditionally overwrites the first,
// so only the scaleMaxO variant for y ever takes effect.
x = ( touches[0].pageX - w ) / scaleMax;
y = height - ( touches[0].pageY - h ) / scaleMaxO;
BufferAUniforms.iMouse.value.x = x;
BufferAUniforms.iMouse.value.y = y;
}
}
This is a snippet where I defined some of the variables mentioned above:
// Simulation buffers are 1/4 of the window size, rounded down to a power of
// two; scaleMax/scaleMaxO map buffer pixels back to screen pixels.
scale = window.devicePixelRatio;
renderer.setPixelRatio(scale);
container.appendChild(renderer.domElement);
height = window.innerHeight * 0.25;
height = THREE.Math.floorPowerOfTwo( height )
scaleMax = window.innerHeight / height;
width = window.innerWidth * 0.25;
width = THREE.Math.floorPowerOfTwo(width)
scaleRatio = width / height;
scaleMaxO = window.innerWidth / width;
// NOTE(review): width is multiplied by scaleMax (a height-derived ratio) and
// height by scaleMaxO (a width-derived ratio) — these look swapped; confirm.
renderer.setSize(width * scaleMax, height * scaleMaxO);
The issue is that it works in Chrome DevTools' mobile emulator, but not on a real device (a Samsung S9 Plus).
You can see the whole thing here
Assuming you're drawing that smaller render target to fill the canvas it should just be
const rect = renderer.domElement.getBoundingClientRect();
const x = (touches[0].clientX - rect.left) * renderTargetWidth / rect.width;
const y = (touches[0].clientY - rect.top ) * renderTargetHeight / rect.height;
Now X and Y are in pixels in the render target though you might want to flip Y
const y = renderTargetHeight -
(touches[0].clientY - rect.top) * renderTargetHeight / rect.height;
Note the mouse is no different. In fact this should work
// Convert a mouse/touch client position into render-target pixel
// coordinates by scaling the canvas-relative position by the ratio of the
// render-target size to the canvas's on-screen size.
function computeRenderTargetRelativePosition(e) {
const rect = renderer.domElement.getBoundingClientRect();
const x = (e.clientX - rect.left) * renderTargetWidth / rect.width;
const y = (e.clientY - rect.top ) * renderTargetHeight / rect.height;
return {x, y};
}
renderer.domElement.addEventListener('mousemove', (e) => {
const pos = computeRenderTargetRelativePosition(e);
... do something with pos...
});
renderer.domElement.addEventListener('touchmove', (e) => {
const pos = computeRenderTargetRelativePosition(e.touches[0]);
... do something with pos...
});
The only complication is if you apply CSS transforms then you need different code.
'use strict';
/* global THREE */
// Demo: render a scene into a tiny 24x12 render target, show that texture on
// a full-screen quad, and move a cube to the pointer position converted into
// render-target pixel coordinates.
function main() {
const canvas = document.querySelector('#c');
const renderer = new THREE.WebGLRenderer({canvas});
const rtWidth = 24;
const rtHeight = 12;
const renderTarget = new THREE.WebGLRenderTarget(rtWidth, rtHeight);
// Orthographic camera measured in render-target pixels, origin bottom-left.
const rtCamera = new THREE.OrthographicCamera(0, rtWidth, rtHeight, 0, -1, 1);
const rtScene = new THREE.Scene();
const boxWidth = 1;
const boxHeight = 1;
const boxDepth = 1;
const geometry = new THREE.BoxGeometry(boxWidth, boxHeight, boxDepth);
const rtMaterial = new THREE.MeshBasicMaterial({color: 'red'});
const rtCube = new THREE.Mesh(geometry, rtMaterial);
rtScene.add(rtCube);
// Identity camera + 2x2 plane = full-screen quad textured with the target.
const camera = new THREE.Camera();
const scene = new THREE.Scene();
const planeGeo = new THREE.PlaneBufferGeometry(2, 2);
const material = new THREE.MeshBasicMaterial({
//color: 'blue',
map: renderTarget.texture,
});
const plane = new THREE.Mesh(planeGeo, material);
scene.add(plane);
// Keep the drawing buffer in sync with the canvas's CSS size.
function resizeRendererToDisplaySize(renderer) {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const needResize = canvas.width !== width || canvas.height !== height;
if (needResize) {
renderer.setSize(width, height, false);
}
return needResize;
}
function render(time) {
time *= 0.001;
if (resizeRendererToDisplaySize(renderer)) {
const canvas = renderer.domElement;
}
// draw render target scene to render target
renderer.setRenderTarget(renderTarget);
renderer.render(rtScene, rtCamera);
renderer.setRenderTarget(null);
// render the scene to the canvas
renderer.render(scene, camera);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
// Convert a pointer event's client position into render-target pixels
// (y flipped so it increases upward) and move the cube there.
function setPos(e) {
const rect = renderer.domElement.getBoundingClientRect();
const x = (e.clientX - rect.left) * rtWidth / rect.width;
const y = rtHeight - (e.clientY - rect.top ) * rtHeight / rect.height;
rtCube.position.set(x, y, 0);
}
renderer.domElement.addEventListener('mousemove', (e) => {
setPos(e);
});
// passive: false so preventDefault() can stop scrolling while dragging.
renderer.domElement.addEventListener('touchmove', (e) => {
e.preventDefault();
setPos(e.touches[0]);
}, {passive: false});
}
main();
/* Full-viewport canvas with no page margin or scrollbars. */
body {
margin: 0;
}
#c {
width: 100vw;
height: 100vh;
display: block;
}
<canvas id="c"></canvas>
<script src="https://threejsfundamentals.org/threejs/resources/threejs/r105/three.min.js"></script>

Categories

Resources