I'm new to WebGL and tried to draw an outlined rectangle, while at the same time using shaders that I mixed together :)
The problem is that the code doesn't give any errors, but it doesn't render the outlined rectangle along with the clearing; the canvas just gets cleared and the rectangle is never rendered...
Here is the vertex shader:
// VERTEX SHADER
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_translation;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
vec2 zeroToOne = (a_position + u_translation) / u_resolution;
vec2 zeroToTwo = zeroToOne * 2.0;
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
v_texCoord = a_texCoord;
}
Here is the fragment shader:
// FRAGMENT SHADER
precision mediump float;
uniform sampler2D u_image;
uniform vec4 u_color;
uniform int u_mode;
varying vec2 v_texCoord;
void main(void) {
if (u_mode == 1) { gl_FragColor = u_color; }
if (u_mode == 2) { gl_FragColor = texture2D(u_image, v_texCoord); }
if (u_mode == 3) { gl_FragColor = u_color + texture2D(u_image, v_texCoord); }
}
Oh... and here is the HTML file:
<!DOCTYPE html>
<html>
<head>
<title>POLYGL Test</title>
</head>
<body>
<canvas width="600" height="600"></canvas>
<script>
// Get context
var gl = document.getElementsByTagName("canvas")[0].getContext("webgl");
// Create buffers for rectangle
var x = 100, y = 100, w = 50, h = 50;
var rect = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, rect);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
x, -y,
x + w, -y,
x + w, -(y + h),
x, -(y + h),
]), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
// Create shaders and assign GLSL code for them...
var vs = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vs, `
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_translation;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
vec2 zeroToOne = (a_position + u_translation) / u_resolution;
vec2 zeroToTwo = zeroToOne * 2.0;
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
v_texCoord = a_texCoord;
}`);
gl.compileShader(vs);
var fs = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fs, `
precision mediump float;
uniform sampler2D u_image;
uniform vec4 u_color;
uniform int u_mode;
varying vec2 v_texCoord;
void main(void) {
if (u_mode == 1) { gl_FragColor = u_color; }
if (u_mode == 2) { gl_FragColor = texture2D(u_image, v_texCoord); }
if (u_mode == 3) { gl_FragColor = u_color + texture2D(u_image, v_texCoord); }
}`);
gl.compileShader(fs);
// Create program and link all
var p = gl.createProgram();
gl.attachShader(p, vs);
gl.attachShader(p, fs);
gl.linkProgram(p);
gl.useProgram(p);
// Delete shaders and program after being used...
gl.deleteShader(vs);
gl.deleteShader(fs);
gl.deleteProgram(p);
// Clear to black and set viewport
gl.clearColor(0, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Set color, resolution, And translation...
// Set mode to 1 so it only renders with gl_FragColor
gl.uniform1i(gl.getUniformLocation(p, "u_mode"), 1);
gl.uniform4f(gl.getUniformLocation(p, "u_color"), Math.random(), Math.random(), Math.random(), 1);
gl.uniform2f(gl.getUniformLocation(p, "u_resolution"), gl.canvas.width, gl.canvas.height);
gl.uniform2f(gl.getUniformLocation(p, "u_translation"), 0, 0);
// Bind rectangle buffer
// Enable vertex attribute and assign buffer to shaders...
gl.bindBuffer(gl.ARRAY_BUFFER, rect);
gl.vertexAttribPointer(gl.getAttribLocation(p, "a_position"), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(p, "a_position"));
// Draw!
gl.drawArrays(gl.LINE_LOOP, 0, 4);
</script>
</body>
</html>
I even tried removing the deletion of the shaders and program, but that doesn't seem to help...
Can anyone help me?
Solved! The vertex coordinates weren't right because the y values were negated, which pushed the rectangle outside the canvas. Fixed:
<!DOCTYPE html>
<html>
<head>
<title>POLYGL Test</title>
</head>
<body>
<canvas width="600" height="600"></canvas>
<script>
var gl = document.getElementsByTagName("canvas")[0].getContext("webgl");
var x = 100, y = 100, w = 50, h = 50;
var rect = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, rect);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
x, y,
x + w, y,
x + w, y + h,
x, y + h,
]), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
var vs = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vs, `
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_translation;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
vec2 zeroToOne = (a_position + u_translation) / u_resolution;
vec2 zeroToTwo = zeroToOne * 2.0;
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
v_texCoord = a_texCoord;
}`);
gl.compileShader(vs);
var fs = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fs, `
precision mediump float;
uniform sampler2D u_image;
uniform vec4 u_color;
uniform int u_mode;
varying vec2 v_texCoord;
void main(void) {
if (u_mode == 1) { gl_FragColor = u_color; }
if (u_mode == 2) { gl_FragColor = texture2D(u_image, v_texCoord); }
if (u_mode == 3) { gl_FragColor = u_color + texture2D(u_image, v_texCoord); }
}`);
gl.compileShader(fs);
var p = gl.createProgram();
gl.attachShader(p, vs);
gl.attachShader(p, fs);
gl.linkProgram(p);
gl.useProgram(p);
gl.deleteShader(vs);
gl.deleteShader(fs);
gl.deleteProgram(p);
gl.clearColor(0, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.uniform1i(gl.getUniformLocation(p, "u_mode"), 1);
gl.uniform4f(gl.getUniformLocation(p, "u_color"), Math.random(), Math.random(), Math.random(), 1);
gl.uniform2f(gl.getUniformLocation(p, "u_resolution"), gl.canvas.width, gl.canvas.height);
gl.uniform2f(gl.getUniformLocation(p, "u_translation"), 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, rect);
gl.vertexAttribPointer(gl.getAttribLocation(p, "a_position"), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(p, "a_position"));
gl.drawArrays(gl.LINE_LOOP, 0, 4);
</script>
</body>
</html>
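A side note on "the code doesn't give errors": WebGL only reports shader compile and program link failures when you query them explicitly. A minimal sketch of such a check, using the same vs, fs and p variables as above:
// Query compile/link status explicitly and print the info logs on failure.
if (!gl.getShaderParameter(vs, gl.COMPILE_STATUS)) {
  console.error(gl.getShaderInfoLog(vs));
}
if (!gl.getShaderParameter(fs, gl.COMPILE_STATUS)) {
  console.error(gl.getShaderInfoLog(fs));
}
if (!gl.getProgramParameter(p, gl.LINK_STATUS)) {
  console.error(gl.getProgramInfoLog(p));
}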
I'm having a problem figuring out how to use color blending and lighting (Lambert) in the same shader program, because as far as my studies go, they don't use the same vertex shader. Is there a way to do this?
My shaders look like this:
<script id="vertex-shader" type="notjs">#version 300 es
in vec3 a_position;
in vec3 a_normal;
//in vec4 a_color;
uniform mat4 u_model_matrix;
uniform mat4 u_view_matrix;
uniform mat4 u_projection_matrix;
uniform mat4 u_normal_matrix;
uniform vec3 u_material_diffuse;
uniform vec3 u_light_diffuse;
uniform vec3 u_light_direction;
out vec4 v_color;
void main() {
gl_PointSize = 10.0;
gl_Position = u_projection_matrix * u_view_matrix * u_model_matrix * vec4(a_position, 1.0);
vec3 corrected_a_normal = vec3(u_normal_matrix * vec4(a_normal, 1.0));
vec3 normalized_a_normal = normalize(corrected_a_normal);
vec3 normalized_u_light_direction = normalize(u_light_direction);
float lambert_coefficient = dot(-normalized_u_light_direction, normalized_a_normal);
lambert_coefficient = max(lambert_coefficient, 0.0);
vec3 diffuse_color = u_light_diffuse * u_material_diffuse * lambert_coefficient;
v_color = vec4(diffuse_color,1.0);
}
</script>
<script id="fragment-shader" type="notjs">#version 300 es
precision mediump float;
in vec4 v_color;
out vec4 outColor;
void main() {
outColor = v_color;
}
</script>
You can blend colors however you want.
It sounds like what you mean is: can you use vertex colors and lighting at the same time?
Yes:
in vec4 a_color;
...
vec3 diffuse_color = u_light_diffuse *
                     u_material_diffuse *
                     a_color.rgb *
                     lambert_coefficient;
That, for example, would be one way of "blending" those colors.
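Feeding that a_color attribute from JavaScript is just the usual buffer setup; a minimal sketch (gl, program and colors are assumed names, not taken from your code):
// One RGBA color per vertex for a_color.
var colorBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
var colorLoc = gl.getAttribLocation(program, "a_color");
gl.enableVertexAttribArray(colorLoc);
gl.vertexAttribPointer(colorLoc, 4, gl.FLOAT, false, 0, 0);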
I've created a simple three.js app which renders map tiles in 3D. Each tile is a separate mesh (a plane with 10 height and 10 width segments) which uses a THREE.ShaderMaterial. In order to access elevation data I used this Mapbox tileset. Two PNG tiles (elevation and color) are passed to a simple shader that decodes the height value:
<script id="vertexShader" type="x-shader/x-vertex">
uniform sampler2D bump;
uniform sampler2D map;
varying float vAmount;
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 bumpData = texture2D( bump, uv );
vAmount = bumpData.x;
float height = (-0.4 + (bumpData.x * 256.0 * 256.0 + bumpData.y * 256.0 + bumpData.z)/1024.0);
vec3 newPosition = position;
newPosition.z = height;
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-vertex">
uniform sampler2D bump;
uniform sampler2D map;
varying vec2 vUv;
varying float vAmount;
void main()
{
if(vAmount == 0.0) {
discard;
}
vec4 tile = texture2D(map, vUv);
gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0) + tile;
}
</script>
However, it seems like nearby tiles have slightly different height values along their edges. I've tried using THREE.NearestFilter for the textures (roughly as sketched below), but it didn't have any effect. Is there a way to get rid of these white stripes?
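(A sketch of the filter attempt; elevationTexture is a placeholder name, not from my actual code:)
// Turn off filtering and mipmapping on the elevation texture.
elevationTexture.magFilter = THREE.NearestFilter;
elevationTexture.minFilter = THREE.NearestFilter;
elevationTexture.generateMipmaps = false;
elevationTexture.needsUpdate = true;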
I would like to import this shader into my project:
https://shaderfrog.com/app/view/2447
This is my fragment shader:
<script id="fragmentShader" type="x-shader/x-fragment">
#ifdef GL_ES
precision highp float;
precision highp int;
#endif
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
uniform sampler2D texture1;
// Example varyings passed from the vertex shader
varying vec3 vPosition;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
vec3 color = vec3(texture2D(texture1, vUv));
vec3 outcolor=color;
float v = u_time + (vUv.x*0.5 + vUv.y*0.5);
vec2 Uv2 = vec2(color.r+color.b+v,color.g+color.b+v);
outcolor = vec3(texture2D(texture1, Uv2));
gl_FragColor = vec4( outcolor, 1.0 );
}
</script>
And this is my vertex shader:
<script id="vertexShader" type="x-shader/x-vertex">
varying vec3 vPosition;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
vNormal = normal;
vUv = uv;
vPosition = position;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
My shader compiles, but I don't get the same result: I only get a heavily contrasted, frozen image. I do increment my u_time uniform in a requestAnimationFrame callback, roughly as sketched below.
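(Simplified sketch; material, renderer, scene and camera are placeholder names:)
// u_time is bumped every frame before rendering.
function animate() {
  requestAnimationFrame(animate);
  material.uniforms.u_time.value += 0.01;
  renderer.render(scene, camera);
}
animate();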
What am I doing wrong?
I've simplified your code and made it work with the latest revision (r96).
Pay attention to the texture settings: the fragment shader samples the texture at UV coordinates that drift outside the [0, 1] range over time, so wrapS and wrapT are set to THREE.RepeatWrapping; with the default clamping you would get exactly the kind of frozen, stretched image you describe.
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 1, 1000);
camera.position.set(0, 0, 2);
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var tex = new THREE.TextureLoader().load("https://threejs.org/examples/textures/UV_Grid_Sm.jpg");
tex.wrapS = THREE.RepeatWrapping;
tex.wrapT = THREE.RepeatWrapping;
var geo = new THREE.PlaneBufferGeometry(2, 2);
var mat = new THREE.ShaderMaterial({
uniforms:{
time: {value: 0},
texture1: {value: tex}
},
vertexShader:`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
`,
fragmentShader: `
uniform float time;
uniform sampler2D texture1;
varying vec2 vUv;
void main() {
vec3 c = vec3(texture2D(texture1, vUv));
float v = time + (vUv.x*0.5 + vUv.y*0.5);
vec2 Uv2 = vec2(c.r + c.b+v, c.g + c.b + v);
vec3 outcolor = vec3(texture2D(texture1, Uv2));
gl_FragColor = vec4( outcolor, 1.0 );
}
`
});
var plane = new THREE.Mesh(geo, mat);
scene.add(plane);
var clock = new THREE.Clock();
var time = 0;
render();
function render(){
requestAnimationFrame(render);
time += clock.getDelta();
mat.uniforms.time.value = time;
renderer.render(scene, camera);
}
body{
overflow: hidden;
margin: 0;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/96/three.min.js"></script>
I'm using THREE.js r73, and I'm trying to make a particle system using a THREE.BufferGeometry to hold the vertices and a THREE.ShaderMaterial to add functionality to them. For some reason, I'm getting the error above. Here is my code (using TypeScript). The error occurs at the line gl_FragColor = gl_FragColor * texture2D( texture, gl_PointCoord ); (the last one in the fragment shader).
this.particleSystem = new THREE.PointCloud(
this.particles,
new THREE.ShaderMaterial({
uniforms: {
texture: wTex
},
vertexShader: `
attribute vec3 color;
attribute float size;
varying vec3 vColor;
varying vec2 vUv;
void main() {
vUv = uv;
vColor = color; // set color associated to vertex; use later in fragment shader
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = size * ( 300.0 / length( mvPosition.xyz ) );
gl_Position = projectionMatrix * mvPosition;
}
`,
fragmentShader: `
uniform sampler2D texture;
varying vec3 vColor; // colors associated to vertices; assigned by vertex shader
varying vec2 vUv;
void main() {
// calculates a color for the particle
gl_FragColor = vec4( vColor, 1.0 );
// sets particle texture to desired color
gl_FragColor = gl_FragColor * texture2D( texture, gl_PointCoord );
}
`
})
);
The code was adapted from some examples I found online, but I understand it.
You're not constructing the uniform correctly. It should be...
uniforms: {
texture: { type: 't', value: wTex }
}
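For context, in r73 every entry in uniforms needs to be wrapped as an object with a value (and, in that release, a type; 't' marks a texture sampler), and later updates go through the value property. A small sketch (mat and the shader source variable names are assumed):
// r73-style uniform declaration: each entry is wrapped as { type, value }.
var mat = new THREE.ShaderMaterial({
  uniforms: {
    texture: { type: 't', value: wTex }
  },
  vertexShader: vertexShaderSource,     // assumed names
  fragmentShader: fragmentShaderSource
});
// Updating the texture later goes through .value:
mat.uniforms.texture.value = wTex;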