ThreeJS texture not working on differently shaped objects - javascript

I am using a dynamic (canvas) texture in three.js. When I use differently shaped objects, the texture does not render correctly. But when I use BoxGeometry, it works fine.
setCanvas = () => {
  this.canvas = window.document.getElementById("canvas");
  var ctx = this.canvas.getContext("2d");
  ctx.font = "20pt Arial";
  ctx.fillStyle = "red";
  ctx.fillRect(0, 0, this.canvas.width, this.canvas.height);
  ctx.fillStyle = "white";
  ctx.fillRect(10, 10, this.canvas.width - 20, this.canvas.height - 20);
  ctx.fillStyle = "yellow";
  ctx.textAlign = "center";
  ctx.textBaseline = "middle";
  ctx.fillText("asdasdasd", this.canvas.width / 2, this.canvas.height / 2);
}
The creator for the differently shaped objects:
threeObjectCreator(mesh, position, color) {
  const object = this.objectLoader.parse(mesh);
  object.position.set(position.x, position.z, position.y);
  object.rotation.x = Math.PI / 2;
  let texture = new THREE.Texture(this.canvas);
  object.material = new THREE.MeshPhongMaterial({
    color,
    side: THREE.DoubleSide,
    shininess: 30,
    //flatShading: THREE.FlatShading,
    transparent: false,
    map: texture
  });
  texture.needsUpdate = true;
  return object;
}
With box geometry:
let geometry = new THREE.BoxGeometry( 200, 200, 200 );
let mesh = new THREE.Mesh(geometry, material);
this.scene.add(mesh);
How can I solve this problem?

When using textures in OpenGL (three.js uses it via WebGL), it's important that you properly set the UV parameters on your faces.
These UV parameters tell the renderer how to map a 2D texture onto a 3D surface.
Let's look at the UV parameters of a simple box model (new THREE.BoxGeometry()):
[[
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}]
]]
This looks scary at first.
To understand these coordinates, you have to know that our box is made up of 12 faces, in this case triangles, two per side; each face gets one array of three UV pairs, one per vertex.
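If you want to print these for yourself, a one-liner will do (this assumes an older, Geometry-based three.js release, roughly pre-r125, where faceVertexUvs still exists):
// Dump the UV channel of a unit box; the output is the nested array shown above.
console.log(JSON.stringify(new THREE.BoxGeometry(1, 1, 1).faceVertexUvs));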
Interesting things happen when we modify one of the coordinates in the UV grid:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 1000 );
var renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );

var geometry = new THREE.BoxGeometry( 3, 3, 3 );
var material = new THREE.MeshBasicMaterial( { color: 0xffffff } );

// The first render is deferred until the texture has loaded.
new THREE.TextureLoader().load( "https://threejs.org/examples/textures/uv_grid_opengl.jpg", (texture) => {
  material.map = texture;
  renderer.render( scene, camera );
});

// Overwrite the UVs; note the -1 and 2 in the very first triangle.
geometry.faceVertexUvs =
[[
[{"x":0,"y":1},{"x":0,"y":-1},{"x":2,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}],
[{"x":0,"y":1},{"x":0,"y":0},{"x":1,"y":1}],
[{"x":0,"y":0},{"x":1,"y":0},{"x":1,"y":1}]
]]
var cube = new THREE.Mesh( geometry, material );
cube.rotation.x = Math.PI / 3;
cube.rotation.y = 0;
cube.rotation.z = Math.PI / 3;
scene.add( cube );
camera.position.z = 5;

/* CSS for the snippet */
body { margin: 0; font-size: 0; }
canvas { width: 100%; height: 100% }

<!-- HTML for the snippet -->
<script src="https://threejs.org/build/three.js"></script>
Notice how the texture shifts? This is how UV coordinates let you apply a 2D texture to any 3D surface. (Also try playing around with Texture.wrapS.)
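As an aside, and only as a sketch on top of the snippet above: switching the texture's wrap mode changes how UVs outside the 0..1 range are interpreted, which is another easy way to see the mapping at work.
// Repeat the texture instead of clamping it at the edges.
texture.wrapS = THREE.RepeatWrapping;
texture.wrapT = THREE.RepeatWrapping;
texture.repeat.set(2, 2); // tile it twice in each direction
texture.needsUpdate = true;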
The problem
At the moment your code is loading its objects from an external source (you are using an ObjectLoader), and since no material file (such as an OBJ's accompanying MTL) was present, there is no information about the material properties.
This means that the objects you are trying to render don't have any texture coordinates, so we need to provide this information in some way.
While it's possible to hand-craft this data, that isn't very scalable, so we have to fall back to an alternative: generating the UVs from the positions of the vertices.
Generating UV coordinates is a complex topic, as there are many different mappings, and I recommend you find and use one of the solutions at "THREE.js generate UV coordinate - StackOverflow" for this.
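As a rough illustration only (this assumes the parsed object's geometry is a BufferGeometry with a position attribute; older releases use addAttribute instead of setAttribute), a naive planar projection from the bounding box might look like this:
// Project vertex X/Y positions into the 0..1 UV range using the bounding box.
// This is only reasonable for flat-ish shapes; real models need a proper unwrap.
function generatePlanarUvs(geometry) {
  geometry.computeBoundingBox();
  var bbox = geometry.boundingBox;
  var size = new THREE.Vector3();
  bbox.getSize(size);
  var pos = geometry.attributes.position;
  var uvs = new Float32Array(pos.count * 2);
  for (var i = 0; i < pos.count; i++) {
    uvs[2 * i]     = (pos.getX(i) - bbox.min.x) / size.x;
    uvs[2 * i + 1] = (pos.getY(i) - bbox.min.y) / size.y;
  }
  geometry.setAttribute('uv', new THREE.BufferAttribute(uvs, 2));
}
You would then call generatePlanarUvs(object.geometry) inside threeObjectCreator before assigning the material (assuming the parsed object is a single Mesh).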

Related

Three.js - how to create mirror image for the entire scene

So I'm building an AR headset that uses stereo rendering to create 3D images.
The only problem is that when the image reflects off the headset's reflectors, it creates a flipped (mirror) image.
I need to account for that in my Three.js code, so I see two options:
Flip every 3D object in the scene
Or flip the camera to simulate an optically inverted (mirror-image) camera
I'll post the basics of my code here:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera( 75, window.innerWidth/window.innerHeight, 0.1, 1000 );
var renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
var geometry = new THREE.BoxGeometry();
var material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } );
var cube = new THREE.Mesh( geometry, material );
scene.add( cube );
camera.position.z = 5;
const stereo = new THREE.StereoCamera();

function render() {
  // stereo rendering code
  camera.updateWorldMatrix();
  stereo.update(camera);
  const size = new THREE.Vector2();
  renderer.getSize(size);
  renderer.setScissorTest(true);
  renderer.setScissor(0, 0, size.width / 2, size.height);
  renderer.setViewport(0, 0, size.width / 2, size.height);
  renderer.render(scene, stereo.cameraL);
  renderer.setScissor(size.width / 2, 0, size.width / 2, size.height);
  renderer.setViewport(size.width / 2, 0, size.width / 2, size.height);
  renderer.render(scene, stereo.cameraR);
  renderer.setScissorTest(false);
}
requestAnimationFrame(render);
I've also attached some images to explain what my goal is.
It might be easy to just flip the cube using a matrix, but what if dozens more 3D objects get added to the scene?
Ideally, I'd like the entire camera to be flipped so the whole scene is mirrored with respect to the stereo camera (if possible).
I would appreciate any help. It's also worth noting that I've tried manually flipping the display through Windows' display settings, but that did not have the outcome I was looking for.
Could you just use CSS to flip the element you're using to render the scene?
<style>
canvas {
transform: scaleX(-1);
}
</style>

three.js reflector (mirror) clipping object when HDRi background is set

I have a scene with elements over 500 units in size and I want to create a mirror effect for them. To achieve this effect I used the Reflector class from the three.js webgl_mirror example.
I placed the mirror on the ground, and when I set an HDRI background most of the meshes disappear or show only small parts of their surfaces; without the background everything displays normally. I built another scene for testing, and it looks like this unexpected effect begins when the distance between the mirror and the object is more than around 75 units (sometimes it's less; I don't know what it depends on).
Image to preview the effect:
Is there any way to increase the range of this clipping box for the mirror? (I really want to avoid scaling my existing scene.)
What I have already tried:
- changing my perspective camera's far and near distances - no effect
- manipulating the clipBias and recursion parameters, or even increasing the texture size - no effect
- adding multiple lights around the elements - no effect
Code that I used for the experiment:
sceneSetup = () => {
  // initialize
  const width = this.mount.clientWidth;
  const height = this.mount.clientHeight;
  this.scene = new THREE.Scene();
  let helperScene = this.scene;
  this.camera = new THREE.PerspectiveCamera(60, width / height, 1, 500);
  this.camera.position.z = 200;
  this.controls = new OrbitControls(this.camera, document.body);
  this.renderer = new THREE.WebGLRenderer();
  this.renderer.setSize(width, height);
  this.mount.appendChild(this.renderer.domElement); // render to container (React stuff)

  // Load HDR map
  new RGBELoader()
    .setDataType(THREE.UnsignedByteType)
    .load(HdrFile, function(texture) {
      var envMap = pmremGenerator.fromEquirectangular(texture).texture;
      helperScene.background = envMap; // commenting this line out makes the issue go away
      helperScene.environment = envMap;
      texture.dispose();
      pmremGenerator.dispose();
    });
  var pmremGenerator = new THREE.PMREMGenerator(this.renderer);
  pmremGenerator.compileEquirectangularShader();

  // create ground mirror
  let geometry = new THREE.PlaneBufferGeometry(200, 200);
  let groundMirror = new Reflector(geometry, {
    clipBias: 0,
    textureWidth: 1024,
    textureHeight: 1024,
    color: 0x889999,
    recursion: 1
  });
  groundMirror.position.z = -20;
  groundMirror.rotation.x = Math.PI * -0.5;
  // change groundMirror.position.y to -104 and everything looks fine
  groundMirror.position.y = -105;
  this.scene.add(groundMirror);
};
addCustomSceneObjects = () => {
  // create cube to reflect
  const geometry = new THREE.BoxGeometry(50, 50, 50);
  const material = new THREE.MeshPhongMaterial({
    color: 0x156289,
    emissive: 0x072534,
    side: THREE.DoubleSide,
    depthTest: true,
    depthWrite: true
  });
  this.cube = new THREE.Mesh(geometry, material);
  this.cube.position.y = 0;
  this.scene.add(this.cube);

  // adding lights
  const lights = [];
  lights[0] = new THREE.PointLight(0xffffff, 1, 0);
  lights[1] = new THREE.PointLight(0xffffff, 1, 0);
  lights[2] = new THREE.PointLight(0xffffff, 1, 0);
  lights[0].position.set(0, 200, 0);
  lights[1].position.set(100, 200, 100);
  lights[2].position.set(-100, -200, -100);
  this.scene.add(lights[0]);
  this.scene.add(lights[1]);
  this.scene.add(lights[2]);
};
startAnimationLoop = () => {
  // rotate cube
  this.cube.rotation.x += 0.01;
  this.cube.rotation.y += 0.01;
  this.requestID = window.requestAnimationFrame(this.startAnimationLoop);
  this.renderer.render(this.scene, this.camera);
};

Orbiting around the origin using a device's orientation

I am trying to replicate the functionality of Google's Cardboard Demo "Exhibit" with three.js. I took the starting example straight from the Chrome Experiments web page and just dropped in code to draw a simple triangular pyramid in the init method:
function init() {
  renderer = new THREE.WebGLRenderer();
  element = renderer.domElement;
  container = document.getElementById('example');
  container.appendChild(element);

  effect = new THREE.StereoEffect(renderer);

  scene = new THREE.Scene();

  camera = new THREE.PerspectiveCamera(90, 1, 0.001, 700);
  camera.position.set(0, 0, 50);
  scene.add(camera);

  controls = new THREE.OrbitControls(camera, element);
  controls.rotateUp(Math.PI / 4);
  controls.noZoom = true;
  controls.noPan = true;

  var geometry = new THREE.CylinderGeometry( 0, 10, 30, 4, 1 );
  var material = new THREE.MeshPhongMaterial( { color: 0xffffff, shading: THREE.FlatShading } );
  var mesh = new THREE.Mesh( geometry, material );
  mesh.updateMatrix();
  mesh.matrixAutoUpdate = false;
  scene.add( mesh );

  var light = new THREE.DirectionalLight( 0xffffff );
  light.position.set( 1, 1, 1 );
  scene.add( light );

  light = new THREE.DirectionalLight( 0x002288 );
  light.position.set( -1, -1, -1 );
  scene.add( light );

  light = new THREE.AmbientLight( 0x222222 );
  scene.add( light );

  function setOrientationControls(e) {
    if (!e.alpha) {
      return;
    }
    controls = new THREE.DeviceOrientationControls(camera);
    controls.connect();
    controls.update();
    element.addEventListener('click', fullscreen, false);
    window.removeEventListener('deviceorientation', setOrientationControls, true);
  }
  window.addEventListener('deviceorientation', setOrientationControls, true);

  window.addEventListener('resize', resize, false);
  setTimeout(resize, 1);
}
The OrbitControls approach works perfectly on desktop: by dragging with the mouse, the camera orbits around the pyramid. On mobile, using DeviceOrientationControls, this effect is entirely lost and instead the camera rotates freely at (0, 0, 0). I tried doing as a previous question suggested and replacing the camera with the scene, such that:
controls = new THREE.DeviceOrientationControls(scene);
however, this does not work at all and nothing moves when the device is rotated. What do I need to change to replicate the OrbitControls behaviour with the motion captured by DeviceOrientationControls?
To create a device-orientation orbit controller, like the one in this demo, http://novak.us/labs/UmDemo/, you need to modify the existing OrbitControls.js.
The file changes can be seen in this commit on GitHub: https://github.com/snovak/three.js/commit/f6542ab3d95b1c746ab4d39ab5d3253720830dd3
I've been meaning to open a pull request for months, I just haven't gotten around to it; it needs a bunch of cleanup.
You can download my modified OrbitControls.js here (I haven't merged upstream in months either, so results may vary): https://raw.githubusercontent.com/snovak/three.js/master/examples/js/controls/OrbitControls.js
Below is how you would implement the modified OrbitControls in your own scripts:
this.controls = new THREE.OrbitControls( camera, document.getElementById('screen') ) ;
controls.tiltEnabled = true ; // default is false. You need to turn this on to control with the gyro sensor.
controls.minPolarAngle = Math.PI * 0.4; // radians
controls.maxPolarAngle = Math.PI * 0.6; // radians
controls.noZoom = true ;
// How far you can rotate on the horizontal axis, upper and lower limits.
// If set, must be a sub-interval of the interval [ - Math.PI, Math.PI ].
controls.minAzimuthAngle = - Math.PI * 0.1; // radians
controls.maxAzimuthAngle = Math.PI * 0.1; // radians
this.UMLogo = scene.children[1];
controls.target = UMLogo.position;
I hope that gets you where you want to be! :-)
To use DeviceOrientationControls you must call controls.update() in your animation loop, or the controls will not update their orientation based on the device's sensors.
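A minimal sketch of such a loop, reusing the controls, scene, camera, and renderer (or effect) variables from the question's init():
function animate() {
  requestAnimationFrame(animate);
  controls.update();               // pulls the latest device orientation into the camera
  renderer.render(scene, camera);  // or effect.render(scene, camera) for the stereo view
}
animate();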

Drawing a Line3 in Threejs

I want to draw a Line3 in three.js, and here is my code for it:
start = new THREE.Vector3(20, 10, 0);
end = new THREE.Vector3(200, 100, 0);
var line = new THREE.Line3(start, end);
scene.add(line);
The code doesn't give any error but it doesn't draw the line either.
In the same program, I also have a sphere:
var initScene = function () {
  window.scene = new THREE.Scene();
  window.renderer = new THREE.WebGLRenderer({
    alpha: true
  });
  window.renderer.setClearColor(0x000000, 0);
  window.renderer.setSize(window.innerWidth, window.innerHeight);
  window.renderer.domElement.style.position = 'fixed';
  window.renderer.domElement.style.top = 0;
  window.renderer.domElement.style.left = 0;
  window.renderer.domElement.style.width = '100%';
  window.renderer.domElement.style.height = '100%';
  document.body.appendChild(window.renderer.domElement);

  var directionalLight = new THREE.DirectionalLight( 0xffffff, 1 );
  directionalLight.position.set( 0, 0.5, 1 );
  window.scene.add(directionalLight);

  window.camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 1000);
  window.camera.position.fromArray([0, 150, 700]);
  window.camera.lookAt(new THREE.Vector3(0, 160, 0));

  window.addEventListener('resize', function () {
    camera.aspect = window.innerWidth / window.innerHeight;
    camera.updateProjectionMatrix();
    renderer.setSize(window.innerWidth, window.innerHeight);
    renderer.render(scene, camera);
  }, false);

  scene.add(camera);

  // set up the sphere vars
  var radius = 50,
      segments = 16,
      rings = 16;

  // create a new mesh with sphere geometry -
  // we will cover the sphereMaterial next!
  var sphereMaterial = new THREE.MeshLambertMaterial({
    color: 0xCC0000
  });
  var sphere = new THREE.Mesh(
    new THREE.SphereGeometry(radius, segments, rings),
    sphereMaterial);

  // add the sphere to the scene
  scene.add(sphere);

  start = new THREE.Vector3(20, 10, 0);
  end = new THREE.Vector3(200, 100, 0);
  var line = new THREE.Line3(start, end);
  scene.add(line);

  renderer.render(scene, camera);
};
initScene();
I only see the sphere on the screen. Can you please tell me where I am wrong?
ADLADS (A Day Late and A Dollar Short):
From http://threejs.org/docs/
Line3 is one of many math objects in three.js that can be used to compute geometric things. Others are:
Box2 Box3 Color Euler Frustum Line3 Math Matrix3 Matrix4 Plane Quaternion Ray Sphere Spline Triangle Vector2 Vector3 Vector4
Say you had your "line", a Line3 object and "plane", a Plane object. You could check if the line intersected the plane by doing plane.intersectsLine(line). Would give true or false.
NONE OF THESE MATH OBJECTS (including Line3's) are Object3D's, the things to be rendered.
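For instance, a tiny sketch of that check (the values here are made up purely for illustration, and nothing gets drawn):
var line  = new THREE.Line3(new THREE.Vector3(0, 0, 0), new THREE.Vector3(10, 0, 0));
var plane = new THREE.Plane(new THREE.Vector3(1, 0, 0), -5); // the plane x = 5

console.log(plane.intersectsLine(line)); // true - the segment crosses x = 5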
Here's the scoop:
1) Make the scene, the camera, and the renderer.
2) Add Object3D's to the scene.
2a) Object3D's can be Points, Lines, or Meshes.
2b) Points, Lines, and Meshes are made of Geometries and Materials.
2c1) for Points and Lines, the Geometries contain vertices(Vector3's) and faces(Face3's). Do "vertices.push" or "faces.push".
2c2) for Meshes, Geometries can be: Box Circle Cylinder Dodecahedron Extrude Icosahedron Lathe Octahedron Parametric Plane Polyhedron Ring Shape Sphere Tetrahedron Text Torus TorusKnot Tube
3) Do "renderer.render(scene, camera)".
Thanks for the question. It straightened me out.
For the sphere, you create a geometry (SphereGeometry) and pass it as an argument to create a mesh (sphere) that has a material, so it can be drawn. Your line, however, is only a Line3 math object; you need to build a 2-vertex geometry from your two points and create the corresponding renderable line:
var lineGeometry = new THREE.Geometry();
lineGeometry.vertices.push(start, end); // the same two Vector3s you already have

var lineMesh = new THREE.Line(
  lineGeometry,
  new THREE.LineBasicMaterial({ color: 0x0000ff }) // basic blue color as material
);
scene.add(lineMesh);

Unusual antialias while using basic texture material in three.js

In the following code I am setting up a very basic scene with an orthographic camera and a canvas mapped as a texture onto a plane geometry.
I put some white text onto a transparent canvas, and if I export it with canvas.toDataURL(), the effect isn't present.
However, when I apply the canvas contents to a material as a texture and render it within a super-standard 2D scene, a black line outlines my text, probably the result of some weird antialiasing. In this example, the renderer clear color, the material, and the text are all pure white.
Here is a screenshot:
var camera = new THREE.OrthographicCamera(window.innerWidth / - 2,
window.innerWidth / 2,
window.innerHeight / 2,
window.innerHeight / - 2, 0, 10);
var scene = new THREE.Scene();
canvas = document.createElement('canvas');
canvas.width = 300;
canvas.height = 300;
var context = canvas.getContext('2d');
context.fillStyle = "white";
context.font = "bold 72px Arial";
context.fillText("Zibri", 50, 100);
var texture = new THREE.Texture(canvas);
var geometry = new THREE.PlaneGeometry(canvas.width, canvas.height);
texture.needsUpdate = true;
var material = new THREE.MeshBasicMaterial({ color: 0xffffff, map: texture, transparent: true });
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
renderer = new THREE.WebGLRenderer({ antialias: false });
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.setClearColor(0xffffff)
document.body.appendChild( renderer.domElement );
camera.lookAt(scene.position);
renderer.render(scene, camera);
I've also been struggling with this problem. WestLangley's solution went part way to fixing the issue, but left the rendered text with very poorly antialiased edges. After extensive research, I've come up with a solution that I'm happy with, which I've outlined below.
Before drawing the text, fill the canvas with the same colour as the text, but with alpha set to 0.01:
context.fillStyle = 'rgba(255,255,255,0.01)';
context.fillRect(0,0,canvas.width,canvas.height);
and then use the material's alphaTest property to discard pixels of this opacity:
var material = new THREE.MeshBasicMaterial({
color: 0xffffff,
map: texture,
transparent: true,
alphaTest:0.01
});
finally, set the texture map's premultiplyAlpha value to false:
material.map.premultiplyAlpha = false;
material.map.needsUpdate = true;
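Putting the three changes together against the question's own canvas setup (only a sketch; variable names follow the question's code):
// 1. near-transparent fill in the text colour, so edge pixels blend toward white
context.fillStyle = "rgba(255,255,255,0.01)";
context.fillRect(0, 0, canvas.width, canvas.height);
context.fillStyle = "white";
context.font = "bold 72px Arial";
context.fillText("Zibri", 50, 100);

var texture = new THREE.Texture(canvas);
texture.premultiplyAlpha = false; // 3. avoid the darkened, premultiplied edges
texture.needsUpdate = true;

var material = new THREE.MeshBasicMaterial({
  color: 0xffffff,
  map: texture,
  transparent: true,
  alphaTest: 0.01 // 2. discard the near-transparent background pixels
});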
