Rendering Complicated Models in Three.js - javascript

If some rockstar could look through my code real quick and tell me why I can't render/see my three.js model, I'd be forever in your debt!
I'll post the whole script, but I think some preliminary info is important. Basically, I have thousands of points that look like this:
472232.14363148 2943288.56768013 200.129142"
472237.03086105 2943289.62356560 200.119496"
472241.91809061 2943290.67945108 200.109851"
472246.80532018 2943291.73533656 200.100205"
...and so on...
and a bunch of faces that look like this:
["1021 1020 1061", "640 754 641", "1534 1633 1535", "4701 27 26", "654 753 655", ...and so on...
Once I've extracted all the data and configured it, I push it into the geometry and try to add it to the scene, but with no success. Here's the whole script:
var renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
var camera = new THREE.PerspectiveCamera( 45, window.innerWidth / window.innerHeight, 1, 1000 );
camera.position.set( 0, 50, 50 );
camera.lookAt( 0, 0, 0 );
var scene = new THREE.Scene();
var material = new THREE.MeshStandardMaterial( { color : 0x00cc00 } );
var geometry = new THREE.Geometry();
var points = bigData['LandXML']['Surfaces']['Surface']['Definition']['Pnts']['P']
var faces = bigData['LandXML']['Surfaces']['Surface']['Definition']['Faces']['F']
for(i=0;i<points.length;i++){
var [point1,point2,point3] = points[i]['__text'].split(' ').map(Number)
//point1 *= .00001
//point2 *= .00001
//point3 *= .00001
geometry.vertices.push(new THREE.Vector3(point1,point2,point3))
}
for(i=0;i<faces.length;i++){
console.log(faces[i]);
var [face1,face2,face3] = faces[i].split(' ').map(Number)
var face = new THREE.Face3(face1 - 1,face2 - 1, face3 - 1)
geometry.faces.push(face)
}
scene.add( new THREE.Mesh( geometry, material ) );
You can see that in the for loop for the points I multiply each coordinate by .00001 to scale the model down, because otherwise the numbers are huge, if that makes sense. And I subtract 1 from each face index because the data is not zero-indexed. Anyway, if any coding superhero takes the time to read this and has some insight, please help me out! Thanks!

So, I got your model to show up in the scene; the code was missing these parts:
Your model has big coordinates but a small extent. After dividing
all coordinates by 100,000 its boundingSphere center is at (4.72,
29.432, 0.00192) and its boundingSphere radius is 0.00480, so you need
to either translate the model so its center is at (0, 0, 0) or move the camera
target to its center.
geometry.computeFaceNormals() must be called after building the faces
in order to make them responsive to lighting and shading.
The scene needs some light in it; in its current state it will not show
anything.
A render loop, or at least a single render call after adding the mesh, is required.
Here is the fixed code. Additionally, I played with the camera distance and added a simple rotation animation, and I used the geometry-translation approach to get a sensible rotation:
<!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<title>DXF Test</title>
</head>
<body>
<script src="three.min.js"></script>
<script type="text/javascript" src="JFM.js"></script>
<script>
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 2000);
camera.position.set(0, 650, 650);
camera.lookAt(0, 0, 0);
var scene = new THREE.Scene();
var material = new THREE.MeshStandardMaterial({color: 0x00cc00});
var geometry = new THREE.Geometry();
//geometry.vertices.push(new THREE.Vector3(4.72227256402, 29.4328751179, 0.00200138787));
//geometry.vertices.push(new THREE.Vector3(3, 3, 0));
//geometry.vertices.push(new THREE.Vector3(5, 6, 0));
//var line = new THREE.Line( geometry, material );
//scene.add( line );
//renderer.render( scene, camera );
var points = bigData['LandXML']['Surfaces']['Surface']['Definition']['Pnts']['P'];
var faces = bigData['LandXML']['Surfaces']['Surface']['Definition']['Faces']['F'];
console.log('Complete XML file');
console.log(bigData);
console.log('POINTS');
console.log(points);
console.log('FACES');
console.log(faces);
for (i = 0; i < points.length; i++) {
var [point1, point2, point3] = points[i]['__text'].split(' ').map(Number);
geometry.vertices.push(new THREE.Vector3(point1, point2, point3));
}
for (i = 0; i < faces.length; i++) {
var [face1, face2, face3] = faces[i].split(' ').map(Number);
var face = new THREE.Face3(face1 - 1, face3 - 1, face2 - 1);
geometry.faces.push(face)
}
geometry.computeFaceNormals();
geometry.computeBoundingSphere();
// translate the model so its real center coincides with the mesh pivot
geometry.translate(
-geometry.boundingSphere.center.x,
-geometry.boundingSphere.center.y,
-geometry.boundingSphere.center.z
);
// compute bounding sphere again, because it was broken during translation
geometry.computeBoundingSphere();
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
var sunLight = new THREE.DirectionalLight(0xffffff);
sunLight.position.set(200, 600, 1000);
scene.add(sunLight);
function animate() {
requestAnimationFrame(animate);
renderer.render(scene, camera);
mesh.rotation.y += 0.01;
mesh.rotation.x += 0.01;
}
animate();
</script>
</body>
</html>
If you don't like the translation, you can remove it and the rotation, and after computeBoundingSphere do this instead:
camera.position.set(
geometry.boundingSphere.center.x + geometry.boundingSphere.radius,
geometry.boundingSphere.center.y + geometry.boundingSphere.radius,
geometry.boundingSphere.center.z + geometry.boundingSphere.radius,
);
camera.lookAt(
geometry.boundingSphere.center.x,
geometry.boundingSphere.center.y,
geometry.boundingSphere.center.z,
);
Hope this helps.

Related

How to set the vertical angle of a SphereBufferGeometry? Why don't OrbitControls work properly when the mesh is rotated?

I am having issues trying to set the default view of a panoramic image inside a SphereBufferGeometry. I have a mesh that uses this geometry, and the material is an equirectangular panorama image. At runtime, the default view of the image is somewhere towards the right; I want the initial angle to be at the bottom of the camera. I tried changing the phi and theta angle parameters of the SphereBufferGeometry. While I am able to move the default horizontal angle by changing the phi angle, the panoramic image looks weird when I change the theta angle.
I took the first two snapshots with the phi angle at 0 and 100 respectively and the theta angle at 0. I took the last snapshot with the phi angle at 100 and the theta angle at 1.
Any help would be appreciated.
Thanks
sphereBufferGeometry = new THREE.SphereGeometry(100, 100, 100, 0, Math.PI * 2, 0, Math.PI);
sphereBufferGeometry.scale( 1, 1, -1 );
var material = new THREE.MeshBasicMaterial( {
map: new THREE.TextureLoader().load([image] )
});
mesh = new THREE.Mesh( sphereBufferGeometry, material );
I have tried this as well.
init();
var scene,renderer,camera;
function init() {
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 0.1, 100);
var light = new THREE.PointLight(0xffffff, 0.8);
var light2 = new THREE.DirectionalLight( 0xffddcc, 1 );
light.position.set( 1, 0.75, 0.5 );
scene.add( light2 );
var sphereBufferGeometry = new THREE.SphereGeometry(100, 100, 100, 0, Math.PI * 2, 0, Math.PI);
sphereBufferGeometry.scale(1, 1, -1);
var material = new THREE.MeshBasicMaterial({
map: new THREE.TextureLoader().load('https://threejs.org/examples/textures/2294472375_24a3b8ef46_o.jpg')
});
var mesh = new THREE.Mesh(sphereBufferGeometry, material);
mesh.rotation.x=1.6;
scene.add(camera);
scene.add(light);
scene.add(mesh);
renderer = new THREE.WebGLRenderer({
antialias: true,
preserveDrawingBuffer: true
});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
var orbitControls = new THREE.OrbitControls(camera, renderer.domElement);
camera.position.set(0, 0, 0.001);
orbitControls.update();
var container = document.getElementById('container');
var canvas = renderer.domElement;
canvas.setAttribute("class", "frame");
container.appendChild(canvas);
document.body.appendChild(container);
animate();
}
function animate()
{
requestAnimationFrame(animate);
renderer.render( scene, camera );
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/105/three.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<div id="container"></div>
As you can see, I've rotated the mesh so that the default view is looking at the floor. If we click and drag the mouse upwards until the window in the view is horizontal, and then click and drag left or right, the orbit controls don't seem to work properly, in that they don't give a proper panoramic experience. I would like to know how this can be fixed.

three.js - How to set camera position?

I'm a fairly new beginner in three.js and WebGL programming. I have created a box in three.js and it's working fine, but the problem is that when I set the camera position on the z axis (e.g. camera.position.z = 2;) the box just disappears. Could anyone explain to me why this is happening and how I can properly set the position of the camera?
Try uncommenting the camera.position.z = 2; line in the fiddle:
function init() {
var scene = new THREE.Scene();
var box = getBox(1, 1, 1);
scene.add(box);
var camera = new THREE.Camera(45, window.innerWidth/window.innerHeight, 1, 1000 );
//camera.position.z = 2;
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.getElementById("webgl").appendChild(renderer.domElement);
renderer.render(scene, camera);
}
function getBox(w, h, d) {
var geometry = new THREE.BoxGeometry(w, h, d);
var material = new THREE.MeshBasicMaterial({
color : 0x00ff00
});
var mesh = new THREE.Mesh(geometry, material);
return mesh;
}
init();
Not sure if you're trying to create this scene with an orthographic or a perspective camera, but you'll typically need to specify the camera by type (i.e. THREE.PerspectiveCamera(...)).
I also added a few extra lines to ensure the camera was configured correctly, namely setting the "lookAt" point to (0, 0, 0), as well as setting an actual position for the camera via the THREE.Vector3.set(...) method.
Here are my adjustments to your code:
function init() {
var scene = new THREE.Scene();
var box = getBox(1, 1, 1);
scene.add(box);
var camera = new THREE.PerspectiveCamera(70,
window.innerWidth/window.innerHeight, 0.1, 1000 ); // Specify camera type like this
camera.position.set(0,2.5,2.5); // Set position like this
camera.lookAt(new THREE.Vector3(0,0,0)); // Set look at coordinate like this
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.getElementById("webgl").appendChild(renderer.domElement);
renderer.render(scene, camera);
}
function getBox(w, h, d) {
var geometry = new THREE.BoxGeometry(w, h, d);
var material = new THREE.MeshBasicMaterial({
color : 0x00ff00
});
var mesh = new THREE.Mesh(geometry, material);
return mesh;
}
init();
Try
camera.position.set( <X> , <Y> , <Z> );
where <X> and <Z> are the horizontal (ground-plane) coordinates and <Y> is the height.
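For example, with placeholder values (nothing from the question), pulling the camera up and back and then aiming it at the origin keeps the default box in view:
camera.position.set( 0, 2, 5 );                 // 2 units up, 5 units back along Z
camera.lookAt( new THREE.Vector3( 0, 0, 0 ) );  // aim at the scene origin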

Lithophane effect in Three JS

Is there a way to get a lithophane effect using three.js?
Currently I have tried different materials with transparency and opacity, but have not succeeded.
<html lang="en">
<head>
<title>Lith (Three.js)</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
</head>
<body>
<script src="js/three.min.js"></script>
<script src="./js/dat.gui.min.js"></script>
<script src="./js/STLLoader.js"></script>
<script src="js/Detector.js"></script>
<script src="js/OrbitControls.js"></script>
<script src="js/SkyShader.js"></script>
<script src="js/THREEx.WindowResize.js"></script>
<div id="ThreeJS" style="position: absolute; left:0px; top:0px"></div>
<script>
var container, scene, camera, renderer, controls, stats;
var clock = new THREE.Clock();
var cube;
init();
animate();
function init()
{
// SCENE
scene = new THREE.Scene();
// CAMERA
var SCREEN_WIDTH = window.innerWidth, SCREEN_HEIGHT = window.innerHeight;
var VIEW_ANGLE = 45, ASPECT = SCREEN_WIDTH / SCREEN_HEIGHT, NEAR = 0.1, FAR = 20000;
camera = new THREE.PerspectiveCamera( VIEW_ANGLE, ASPECT, NEAR, FAR);
scene.add(camera);
camera.position.set(0,150,400);
camera.lookAt(scene.position);
// RENDERER
if ( Detector.webgl )
renderer = new THREE.WebGLRenderer( {antialias:true} );
else
renderer = new THREE.CanvasRenderer();
renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT);
renderer.setClearColor( 0x999999 );
container = document.getElementById( 'ThreeJS' );
container.appendChild( renderer.domElement );
// EVENTS
THREEx.WindowResize(renderer, camera);
controls = new THREE.OrbitControls( camera, renderer.domElement );
// SKYBOX/FOG
var skyBoxGeometry = new THREE.CubeGeometry( 10000, 10000, 10000 );
var skyBoxMaterial = new THREE.MeshBasicMaterial( { color: 0x9999ff, side: THREE.BackSide } );
var skyBox = new THREE.Mesh( skyBoxGeometry, skyBoxMaterial );
// scene.add(skyBox);
scene.fog = new THREE.FogExp2( 0x9999ff, 0.00025 );
////////////
// CUSTOM //
////////////
// must enable shadows on the renderer
renderer.shadowMapEnabled = true;
// "shadow cameras" show the light source and direction
// spotlight #1 -- yellow, dark shadow
var spotlight = new THREE.SpotLight(0xffff00);
spotlight.position.set(0,150,-50);
spotlight.shadowCameraVisible = true;
spotlight.shadowDarkness = 0.8;
spotlight.intensity = 2;
// must enable shadow casting ability for the light
spotlight.castShadow = true;
scene.add(spotlight);
var sphereSize = 10;
var pointLightHelper = new THREE.SpotLightHelper( spotlight, sphereSize );
scene.add( pointLightHelper );
var light = new THREE.SpotLight(0x999999);
light.intensity = 0.6;
camera.add(light);
var loader = new THREE.STLLoader();
loader.load('./TestOriginal.stl', function(object) {
meshObject = object;
var color = new THREE.Color( 0xffffff );
var material = new THREE.MeshPhongMaterial({
color: color,//'white',
side: THREE.DoubleSide,
//shading: THREE.SmoothShading,
opacity: 0.6,
transparent: true
});
this.mesh = new THREE.Mesh(object, material);
mesh.position.set(0,0,0);
scene.add(mesh);
mesh.position.set(0,0,0);
var newScale = 1;
mesh.geometry.computeBoundingBox();
boundingBox = mesh.geometry.boundingBox;
mesh.translateX(-((boundingBox.max.x + boundingBox.min.x) * newScale) / 2);
mesh.translateY(-((boundingBox.max.y + boundingBox.min.y) * newScale) / 2);
mesh.translateZ(-((boundingBox.max.z + boundingBox.min.z) * newScale) / 2);
});
// floor: mesh to receive shadows
var floorTexture = new THREE.ImageUtils.loadTexture( './checkerboard.jpg' );
floorTexture.wrapS = floorTexture.wrapT = THREE.RepeatWrapping;
floorTexture.repeat.set( 10, 10 );
// Note the change to Lambert material.
var floorMaterial = new THREE.MeshLambertMaterial( { map: floorTexture, side: THREE.DoubleSide } );
var floorGeometry = new THREE.PlaneGeometry(1000, 1000, 100, 100);
var floor = new THREE.Mesh(floorGeometry, floorMaterial);
floor.position.y = -80.5;
floor.rotation.x = Math.PI / 2;
floor.receiveShadow = true;
scene.add(floor);
}
function animate()
{
requestAnimationFrame( animate );
render();
update();
}
function update()
{
controls.update();
}
function render()
{
renderer.render( scene, camera );
}
</script>
</body>
</html>
And my output looks like this:
I have also tried a shader material, and it gives me something like this:
What I want is: there should be a light behind the object, and the engraved part of the object should glow (according to the depth of the object).
This is by no means an answer, but I really think the best approach to this would be writing (or finding) a custom shader. I will preface this by saying that writing shaders is NOT child's play: it can become incredibly complex, due to shader programming languages being relatively low level and relying heavily on a knowledge of esoteric geometry / mathematics.
After some snooping, it looks like you'd need a shader that achieves something called Sub-Surface Scattering (known colloquially as SSS) - this is essentially the behavior of light through a translucent object, based on its thickness (and some other properties which I won't delve into):
To achieve a Lithophane effect, you'd need to generate a "thickness map" of sorts that maps to your object's mesh, and then the custom shader would scatter light correctly through this map to produce an effect similar to what you desire.
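For a very rough sense of what such a shader does, here is a minimal, untested ShaderMaterial sketch for a reasonably recent three.js build. The thicknessTexture is a hypothetical map you would have to bake yourself (0 = thin, 1 = thick); three.js does not generate it for you, and this is only crude back-lighting through a thickness map, not real SSS:
var lithoMaterial = new THREE.ShaderMaterial({
    uniforms: {
        thicknessMap: { value: thicknessTexture },                        // hypothetical baked thickness map
        backLightDir: { value: new THREE.Vector3(0, 0, 1).normalize() },  // world-space direction the back light travels (from lamp towards viewer)
        baseColor:    { value: new THREE.Color(0xffffff) }
    },
    vertexShader: [
        'varying vec2 vUv;',
        'varying vec3 vNormal;',
        'void main() {',
        '    vUv = uv;',
        '    vNormal = normalize( mat3( modelMatrix ) * normal );',       // world-space normal (assumes uniform scale)
        '    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
        '}'
    ].join('\n'),
    fragmentShader: [
        'uniform sampler2D thicknessMap;',
        'uniform vec3 backLightDir;',
        'uniform vec3 baseColor;',
        'varying vec2 vUv;',
        'varying vec3 vNormal;',
        'void main() {',
        '    float thickness = texture2D( thicknessMap, vUv ).r;',
        '    // faces pointing away from the lamp (towards the viewer) receive transmitted light',
        '    float backLight = max( dot( vNormal, backLightDir ), 0.0 );',
        '    // thin (deeply engraved) regions transmit more light, thick regions block it',
        '    float transmitted = backLight * ( 1.0 - thickness );',
        '    gl_FragColor = vec4( baseColor * transmitted, 1.0 );',
        '}'
    ].join('\n')
});
The real sub-surface scattering described below is considerably more involved; this is just to illustrate the "light through thickness" idea.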
I learnt all of this from going through a fairly approachable presentation given by the Lead Rendering Programmer at DICE. Here's an example slide from the presentation:
This shader produces an effect that resembles this, based on thickness, in real time:
If you're serious about achieving this effect, I'd seriously recommend doing some reading on Cg or GLSL shader programming. Personally I like Cg, because I was incentivised to learn it for its compatibility with Unity3D. For this reason, I found a great resource for learning Cg, namely the wikibook on the subject. It's (somewhat) specific to Unity, but the principles will be sound.
Either way, I wish you luck on the subject. Hope this helps!

Raycaster doesn't report intersection with plane

I'm trying to detect collisions with terrain that is created by modifying the heights of a plane's vertices.
But the Raycaster detects the collision correctly in only about 10% of all attempts.
You can see one of the intersections that is not detected properly in the following example:
http://cerno.ch/~kamen/threejs/test.html
What am I doing wrong?
EDIT: Here's jsfiddle
var camera, scene, renderer, gump = 0;
var geometry, material, mesh, map, axis, ray;
init();
animate();
function init() {
//Create cam, so we can see what's happening
camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 1, 10000 );
camera.position.x = 500;
camera.position.y = 300;
//Init scene
scene = new THREE.Scene();
//Load terrain material
material = new THREE.MeshBasicMaterial( { color: 0xff6666, wireframe: false } );
geometry = new THREE.PlaneGeometry(128, 128, 127, 127);
mesh = new THREE.Mesh( geometry, material );
mesh.scale.set(50,50,50);
mesh.rotation.x = -Math.PI/2;
scene.add( mesh );
//Create axis with position and rotation of ray
axis = new THREE.AxisHelper( 100 );
axis.position = new THREE.Vector3(333.2637, 216.6575, -515.6349);
axis.rotation = new THREE.Vector3(1.6621025025, 0.119175114, -2.2270436357);
axis.scale.z = 10;
scene.add(axis);
//Create actual ray, use axis position and rotation
ray = new THREE.Raycaster();
ray.ray.origin.copy(axis.position);
ray.ray.direction.copy(axis.rotation);
//Renderer
renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
}
function animate() {
requestAnimationFrame( animate );
if (mesh)
{
var intersections = ray.intersectObject(mesh);
if (intersections.length > 0)
{
//Never actually happens
console.log("OK");
}
//Move axis so you can see, that it clearly goes throught mesh object
axis.translateZ(Math.cos(gump) * 2);
gump += 0.01;
//Focus camera on axis
camera.lookAt(axis.position);
}
renderer.render( scene, camera );
}
http://jsfiddle.net/BAGnd/
EDIT: Adding system specs as requested:
Windows 7 64bit
Chrome 26.0.1410
Threejs 57
Graphics card: GTX 560 Ti
Found my error.
I thought that the rotation vector used to rotate an object and the direction vector used in a ray were the same thing.
After I converted my rotation vector into a direction vector, it works just fine.
How to convert a rotation vector into a direction vector: How to get Orientation of Camera in THREE.js
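In case it saves someone the lookup, here is a minimal sketch of that conversion with a current three.js build (the snippet above is r57-era, where rotation was still a plain Vector3); it derives the ray direction from the helper's orientation instead of copying its rotation into the ray:
// get the helper's forward (+Z) axis as a world-space unit vector
var direction = new THREE.Vector3();
axis.getWorldDirection( direction );
// build the raycaster from the helper's position and that direction
ray = new THREE.Raycaster( axis.position.clone(), direction );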

three.js: rotate tetrahedron on correct axis

I have to display a rotating tetrahedron in an animated HTML5 graphic, using three.js.
When I create the object it's upside down, but it should sit on the ground on one face, like in this screenshot: http://www10.pic-upload.de/08.10.12/v3nnz25zo65q.png
This is the code for rotating the object at the moment, stored in the render() function. But the axis is incorrect and the object rotates incorrectly.
object.useQuaternion = true;
var rotateQuaternion_ = new THREE.Quaternion();
rotateQuaternion_.setFromAxisAngle(new THREE.Vector3(0, 1, 1), 0.2 * (Math.PI / 180));
object.quaternion.multiplySelf(rotateQuaternion_);
object.quaternion.normalize();
(copied from Three.JS - how to set rotation axis)
This is the result at the moment: http://jsfiddle.net/DkhT3/
How can I access the correct axis to move/rotate the tetrahedron on the ground?
Thanks for suggestions!
Don't do what you copied. It is not correct.
I am assuming that what you want to do is to start off with a tetrahedron that is right-side-up to begin with. One thing you can do is to modify THREE.TetrahedronGeometry() to use four vertices that produce the tetrahedron that you want.
If you want to use the existing code, then what you have to do is to apply a rotation to the geometry right after it is created, like so:
const geometry = new THREE.TetrahedronGeometry( 10, 0 );
geometry.applyMatrix4( new THREE.Matrix4().makeRotationAxis( new THREE.Vector3( 1, 0, - 1 ).normalize(), Math.atan( Math.sqrt( 2 ) ) ) );
geometry.translate( 0, 10 / 3, 0 ); // optional, so it sits on the x-z plane
(Determining the proper rotation angle requires a bit of math, but it turns out to be the arctangent of the sqrt( 2 ) in this case, in radians.)
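In case it helps, here is a sketch of where that number comes from, assuming the default TetrahedronGeometry, whose vertices are the alternating cube corners (1,1,1), (-1,-1,1), (-1,1,-1), (1,-1,-1) scaled to the given radius. The rotation has to carry one of the upper corners, (-1, 1, -1) with this particular axis choice, onto the vertical axis (0, 1, 0), and the angle between that corner direction and the vertical is
\cos\theta = \frac{(-1,1,-1)\cdot(0,1,0)}{\lVert(-1,1,-1)\rVert\,\lVert(0,1,0)\rVert} = \frac{1}{\sqrt{3}}
\quad\Longrightarrow\quad
\theta = \arccos\tfrac{1}{\sqrt{3}} = \arctan\sqrt{2} \approx 54.74^\circ
The axis (1, 0, -1) in the snippet is perpendicular to both that corner direction and the vertical, which is why this single rotation stands the tetrahedron up with the opposite face horizontal; the optional translate then places that face on the x-z plane.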
three.js r.146
Download the three.js library from [http://www.html5canvastutorials.com/libraries/Three.js] and put it into the HTML code below.
You will get a good output.
<!Doctype html>
<html lang="eng">
<head>
<title>I like shapes</title>
<meta charset="UTF-8">
<style>
canvas{
width: 100%;
height: 100%
}
body{
background: url(mathematics.jpg);
width: 800px;
height: 500px;
}
</style>
</head>
<body>
<script src="three.js"></script>
<script>
// creating a Scene
var scene = new THREE.Scene();
// Adding Perspective Camera
// NOTE: There are 3 cameras: Orthographic, Perspective and Combined.
var camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
//create WebGL renderer
var renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
//Drawing a Tetrahedron.
var pyramidgeometry = new THREE.CylinderGeometry(0, 0.8, 4, 4, false);
//Change colour of Pyramid using Wireframe
var pyramidmaterial = new THREE.MeshBasicMaterial({wireframe: true, color: 0x0ff000});
var pyramid = new THREE.Mesh(pyramidgeometry, pyramidmaterial); //This creates the cone
pyramid.position.set(3.1,0,0);
scene.add(pyramid);
//moving camera outward of centre to Z-axis, because Cube is being created at the centre
camera.position.z = 5;
//we have a scene, a camera, a cube and a renderer.
/*A render loop essentially makes the renderer draw the scene 60 times per second.
I have used requestAnimationFrame() function.*/
var render = function() {
requestAnimationFrame(render);
/*cube.rotation.x += 0.01;
cube.rotation.y += 0.01;
cube.rotation.z += 0.01;*/
pyramid.rotation.y += 0.01;
pyramid.rotation.x += 0.01;
pyramid.rotation.z += 0.01;
renderer.render(scene, camera);
};
// calling the function
render();
</script>
</body>
</html>
