Mapbox elevation data precision - javascript

I've created a simple three.js app which renders map tiles in 3D. Each tile is a separate mesh (a plane with 10 height segments and 10 width segments) which uses a THREE.ShaderMaterial. In order to access elevation data I used this Mapbox tileset. Two PNG tiles (elevation and color) are passed to the simple shader that decodes the height value:
<script id="vertexShader" type="x-shader/x-vertex">
// Displaces each vertex along z by the elevation decoded from the RGB "bump" tile.
uniform sampler2D bump; // RGB-encoded elevation tile
uniform sampler2D map;  // color tile (sampled in the fragment shader)
varying float vAmount;  // raw red channel, used by the fragment shader to discard no-data texels
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 bumpData = texture2D( bump, uv );
vAmount = bumpData.x;
// texture2D returns channels normalized to [0,1]; multiply by 255.0 to recover
// the original byte values before recombining. The previous code multiplied the
// normalized values directly by 256, which mis-decodes every channel by a
// factor of 256/255 and loses elevation precision.
float r = bumpData.x * 255.0;
float g = bumpData.y * 255.0;
float b = bumpData.z * 255.0;
float height = (-0.4 + (r * 256.0 * 256.0 + g * 256.0 + b)/1024.0);
vec3 newPosition = position;
newPosition.z = height;
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
// Textures the tile with the color map and discards texels with no elevation data.
// NOTE: the script type was previously "x-shader/x-vertex", which mislabels this
// fragment shader; corrected to "x-shader/x-fragment".
uniform sampler2D bump;
uniform sampler2D map;
varying vec2 vUv;
varying float vAmount;
void main()
{
// vAmount == 0.0 marks a no-data texel in the elevation tile — skip it entirely.
if(vAmount == 0.0) {
discard;
}
vec4 tile = texture2D(map, vUv);
// Adding opaque black only forces alpha toward 1.0; the RGB comes from the tile.
gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0) + tile;
}
</script>
However, it seems that adjacent tiles have slightly different height values along their shared edges. I've tried using THREE.NearestFilter for the textures, but it didn't have any effect. Is there a way to get rid of these white stripes?

Related

Is it possible to color blend and use lighting at the same shader program?

I'm having a problem figuring out how to use color blending and lighting (Lambert) in the same shader program, because as far as my studies go, they don't use the same vertex shader. Is there a way to do this?
My Shaders looks like this:
<script id="vertex-shader" type="notjs">#version 300 es
// Per-vertex Lambert (diffuse) lighting; the resulting color is interpolated
// to the fragment shader through v_color.
in vec3 a_position;
in vec3 a_normal;
//in vec4 a_color;
uniform mat4 u_model_matrix;
uniform mat4 u_view_matrix;
uniform mat4 u_projection_matrix;
uniform mat4 u_normal_matrix;
uniform vec3 u_material_diffuse;
uniform vec3 u_light_diffuse;
uniform vec3 u_light_direction;
out vec4 v_color;
void main() {
gl_PointSize = 10.0;
gl_Position = u_projection_matrix * u_view_matrix * u_model_matrix * vec4(a_position, 1.0);
// Use w = 0.0 when transforming the normal: a normal is a direction, and
// w = 1.0 would add the matrix's translation column to it, skewing the
// lighting whenever the model is translated.
vec3 corrected_a_normal = vec3(u_normal_matrix * vec4(a_normal, 0.0));
vec3 normalized_a_normal = normalize(corrected_a_normal);
vec3 normalized_u_light_direction = normalize(u_light_direction);
// Negate the light direction so the dot product is positive for surfaces
// facing the light; clamp at 0 so back-facing surfaces get no diffuse light.
float lambert_coefficient = dot(-normalized_u_light_direction, normalized_a_normal);
lambert_coefficient = max(lambert_coefficient, 0.0);
vec3 diffuse_color = u_light_diffuse * u_material_diffuse * lambert_coefficient;
v_color = vec4(diffuse_color,1.0);
}
</script>
<script id="fragment-shader" type="notjs">#version 300 es
precision mediump float;
// Pass-through fragment shader: outputs the lighting color interpolated
// from the vertex shader (Gouraud-style shading).
in vec4 v_color;
out vec4 outColor;
void main() {
outColor = v_color;
}
</script>
You can blend colors however you want.
It sounds like what you mean is: can you use vertex colors and lighting at the same time?
Yes
in vec4 a_color;
...
// a_color is a vec4; take .rgb so every factor in the product is a vec3 —
// multiplying a vec3 by a vec4 is a GLSL compile error.
vec3 diffuse_color = u_light_diffuse *
u_material_diffuse *
a_color.rgb *
lambert_coefficient;
For example would be one way of "blending" those colors.

Can't manage to make a shader running on three.js from shaderfrom

I would like to import that shader on my project:
https://shaderfrog.com/app/view/2447
This is my fragment shader:
<script id="fragmentShader" type="x-shader/x-fragment">
#ifdef GL_ES
precision highp float;
precision highp int;
#endif
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
uniform sampler2D texture1;
// Example varyings passed from the vertex shader
varying vec3 vPosition;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
// Base color sampled straight from the texture at the mesh UVs.
vec3 color = vec3(texture2D(texture1, vUv));
vec3 outcolor=color;
// Time-varying offset; drives the distortion of the second lookup.
float v = u_time + (vUv.x*0.5 + vUv.y*0.5);
// Second lookup uses coordinates derived from the first sample's channels.
// NOTE(review): Uv2 routinely leaves [0,1], so the texture must use
// RepeatWrapping (wrapS/wrapT) — with the default clamping the image
// appears frozen and over-contrasted; see the working answer below.
vec2 Uv2 = vec2(color.r+color.b+v,color.g+color.b+v);
outcolor = vec3(texture2D(texture1, Uv2));
gl_FragColor = vec4( outcolor, 1.0 );
}
</script>
And this is my vertex shader:
<script id="vertexShader" type="x-shader/x-vertex">
// Standard pass-through vertex shader: forwards position, normal and UV
// to the fragment shader unchanged.
varying vec3 vPosition;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
vNormal = normal;
vUv = uv;
vPosition = position;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
My shader compiles, but I don't get the same result — only a frozen, highly contrasted image. I do increment my u_time uniform value in a requestAnimationFrame callback.
I can't see what I'm doing wrong?
I've simplified your code and made it work with the latest revision (r96).
Pay attention to the settings of the texture.
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 1, 1000);
camera.position.set(0, 0, 2);
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var tex = new THREE.TextureLoader().load("https://threejs.org/examples/textures/UV_Grid_Sm.jpg");
tex.wrapS = THREE.RepeatWrapping;
tex.wrapT = THREE.RepeatWrapping;
var geo = new THREE.PlaneBufferGeometry(2, 2);
var mat = new THREE.ShaderMaterial({
uniforms:{
time: {value: 0},
texture1: {value: tex}
},
vertexShader:`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
`,
fragmentShader: `
uniform float time;
uniform sampler2D texture1;
varying vec2 vUv;
void main() {
vec3 c = vec3(texture2D(texture1, vUv));
float v = time + (vUv.x*0.5 + vUv.y*0.5);
vec2 Uv2 = vec2(c.r + c.b+v, c.g + c.b + v);
vec3 outcolor = vec3(texture2D(texture1, Uv2));
gl_FragColor = vec4( outcolor, 1.0 );
}
`
});
var plane = new THREE.Mesh(geo, mat);
scene.add(plane);
var clock = new THREE.Clock();
var time = 0;
render();
function render(){
requestAnimationFrame(render);
time += clock.getDelta();
mat.uniforms.time.value = time;
renderer.render(scene, camera);
}
/* Remove default page margin and hide scrollbars so the canvas fills the viewport. */
body{
overflow: hidden;
margin: 0;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/96/three.min.js"></script>

How to blend textures with different size in webgl

I have 2 pictures with different sizes. Say the bgImage is 300*200 and the modelImage is 50*20. It can be described below, where the x and y can be (100, 50)
There is my glsl code:
vs:
attribute vec4 a_position;
attribute vec2 a_texCoord;
uniform float u_flipY;  // 1.0 or -1.0 to flip the quad vertically
varying vec2 v_texCoord;
void main() {
// Component-wise multiply: y is flipped by u_flipY.
// NOTE(review): the 0.0 in the z slot zeroes the position's z coordinate —
// harmless for a z=0 full-screen quad, but verify that is intended.
gl_Position = a_position * vec4(1.0, u_flipY, 0.0, 1.0);
v_texCoord = a_texCoord;
}
fs:
precision mediump float;
uniform sampler2D u_bgImage;
uniform sampler2D u_modelImage;
varying vec2 v_texCoord;
// "over" compositing of the model on top of the background.
// NOTE(review): modelRGB is used without multiplying by alpha, so this
// assumes the model image has premultiplied alpha — confirm against the
// texture upload path.
vec3 compose(vec4 bgRGBA, vec4 modelRGBA) {
vec3 bgRGB = bgRGBA.rgb;
float alpha = modelRGBA.a;
vec3 modelRGB = modelRGBA.rgb;
return modelRGB + bgRGB * (1.0 - alpha);
}
void main() {
vec4 bgRGBA = texture2D(u_bgImage, v_texCoord);
// Fixed: this previously sampled "u_coverImage", which is never declared —
// the declared uniform is u_modelImage, so the shader failed to compile.
vec4 modelRGBA = texture2D(u_modelImage, v_texCoord);
gl_FragColor = vec4(compose(bgRGBA, modelRGBA), 1.0);
}
It seems that when WebGL renders, bgImage and modelImage are always treated as having the same size. So how can I blend textures with different sizes?

THREE.WebGLRenderer: Unknown uniform type: 1009

I'm using THREE.js r73, and I'm trying to make a particle system using THREE.BufferGeometery to hold the vertices and THREE.ShaderMaterial to add functionality to them. For some reason, I'm getting the error above. Here is my code (using Typescript). The error is occurring at the line gl_FragColor = gl_FragColor * texture2D( texture, gl_PointCoord ); (The last one in the fragment shader).
// Point-cloud particle system with per-vertex color/size and a sprite texture.
this.particleSystem = new THREE.PointCloud(
this.particles,
new THREE.ShaderMaterial({
uniforms: {
// Fixed: r73 requires a uniform descriptor object with an explicit
// type tag ('t' = texture). Passing the raw texture (texture: wTex)
// made three.js read the texture's own fields as the descriptor and
// fail with "Unknown uniform type: 1009".
texture: { type: 't', value: wTex }
},
vertexShader: `
attribute vec3 color;
attribute float size;
varying vec3 vColor;
varying vec2 vUv;
void main() {
vUv = uv;
vColor = color; // set color associated to vertex; use later in fragment shader
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = size * ( 300.0 / length( mvPosition.xyz ) );
gl_Position = projectionMatrix * mvPosition;
}
`,
fragmentShader: `
uniform sampler2D texture;
varying vec3 vColor; // colors associated to vertices; assigned by vertex shader
varying vec2 vUv;
void main() {
// calculates a color for the particle
gl_FragColor = vec4( vColor, 1.0 );
// sets particle texture to desired color
gl_FragColor = gl_FragColor * texture2D( texture, gl_PointCoord );
}
`
})
);
The code was adapted from some examples I found online, but I understand it.
You're not constructing the uniform correctly. It should be...
uniforms: {
// r73 needs a descriptor object with a type tag ('t' = texture), not the raw texture
texture: { type: 't', value: wTex }
}

Explode tessellation shader textureCube drawing not real texture

I want to make explode effect like this (http://threejs.org/examples/#webgl_modifier_tessellation), but for cube with panorama textures (cube with 6 images).
I transform vertex shader to this:
uniform float amplitude;          // animation driver for the explode effect
attribute float displacement;     // per-vertex explode distance
varying vec3 vNormal;
varying vec2 vUv;
void main() {
vNormal = normal;
// Scale and shift the UVs as amplitude grows (as in the three.js
// tessellation-modifier example this is adapted from).
vUv = ( 0.5 + amplitude ) * uv + vec2( amplitude );
// Push each vertex outward along its normal by its displacement.
vec3 newPosition = position + amplitude * normal * vec3( displacement );
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
and fragment to this by adding samplerCube and textureCube:
varying vec3 vNormal;
varying vec2 vUv;
uniform samplerCube texture1;
void main() {
// Simple directional light term (kept for reference; currently unused in the
// final color — the previous write of vec4(vec3(dProd), 1.0) to gl_FragColor
// was a dead store, immediately overwritten by the textureCube lookup below,
// so it has been removed).
vec3 light = vec3( 0.5, 0.2, 1.0 );
light = normalize( light );
float dProd = dot( vNormal, light ) * 0.5 + 0.5;
// NOTE(review): on a cube each face has a constant normal, so sampling the
// cubemap by vNormal returns (near) one texel per face — that would explain
// seeing flat color instead of the detailed texture; consider sampling by an
// interpolated position/view direction instead. TODO confirm with the demo.
gl_FragColor = textureCube(texture1, vNormal);
}
But the result doesn't look good: it shows a color, but not the real detailed texture (see the texture at http://pg2.paraplan.io/_pano/cube/1024/nx.jpg).
Can you help me with shader setting, or may be know how to do explode tessellation without shaders?
DEMO: http://pg2.paraplan.io/_pano/cube/scene.html

Categories

Resources