Make pixel intensity dependent on distance from mouse - JavaScript

I am rendering my scene in a 512x512 canvas, and my goal is to have the pixel intensity change according to how far away the pixel is from the mouse cursor. But it doesn't seem to change at all. If I try multiplying by a large number, everything turns white.
var ico = new THREE.Mesh(new THREE.IcosahedronGeometry(0.3,0), light_mat);
ico.position.x = 0;
ico.position.y = 0;
ico.position.z = -3;
scene.add(ico);
var render = function() {
vec.set(
( mouse_x / 512 ) * 2 - 1,
- ( mouse_y / 512 ) * 2 + 1,
0.5 );
vec.unproject( camera );
vec.sub( camera.position ).normalize();
var distance = (ico.position.z-camera.position.z ) / vec.z;
pos.copy( camera.position ).add( vec.multiplyScalar( distance ) );
console.log(pos.x+":"+pos.y+":"+pos.z);
ico.material.uniforms.mouse.value = pos;
requestAnimationFrame(render);
renderer.render(scene, camera);
};
...
<script type="x-shader/x-vertex" id="light_f">
varying vec3 pos;
varying vec2 uv_pos;
varying vec3 norm;
uniform vec3 mouse;
void main() {
vec4 tex = vec4(1.0,1.0,1.0,1.0);
float ang = dot(mouse,norm);
float dis = 0.1*distance(mouse,pos);
gl_FragColor = tex*dis;
}
</script>
<script type="x-shader/x-vertex" id="light_v">
uniform vec3 mouse;
varying vec2 uv_pos;
varying vec3 norm;
varying vec3 pos;
void main() {
uv_pos = uv;
norm = normal;
pos = position;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
Any help appreciated.
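One mismatch worth noting: mouse is the unprojected point in world space, while the pos varying is the raw object-space position attribute, so the two values being compared are not in the same coordinate system. Below is a minimal sketch of comparing them in world space instead, using the modelMatrix uniform that ShaderMaterial provides automatically; the 0.6 divisor is just an arbitrary scale chosen for a 0.3-radius icosahedron, not a tuned value.
// vertex shader (sketch): pass the world-space position instead of the object-space one
varying vec3 world_pos;
void main() {
  world_pos = (modelMatrix * vec4(position, 1.0)).xyz;
  gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
// fragment shader (sketch): both points are now in world space
uniform vec3 mouse;
varying vec3 world_pos;
void main() {
  float dis = clamp(distance(mouse, world_pos) / 0.6, 0.0, 1.0);
  gl_FragColor = vec4(vec3(dis), 1.0);
}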

Related

Three.js blending modes don't work for 3 out of the 5 modes

I am trying to use three.js blending: I have been trying to get MultiplyBlending, SubtractiveBlending, and NoBlending to work, but all I get are white squares.
When I use AdditiveBlending, I get something that works.
fragment shader:
uniform sampler2D diffuseTexture;
varying vec4 vColor;
varying vec2 vAngle;
void main() {
vec2 coords = (gl_PointCoord - 0.5) * mat2(vAngle.x, vAngle.y, -vAngle.y, vAngle.x) + 0.5;
gl_FragColor = texture2D(diffuseTexture, coords) * vColor;
}
vertex shader:
uniform float pointMultiplier;
attribute float size;
attribute float angle;
varying vec4 vColor;
varying vec2 vAngle;
void main() {
vec4 mvPosition = modelViewMatrix * vec4(position, 1.0);
gl_Position = projectionMatrix * mvPosition;
gl_PointSize = size * pointMultiplier / gl_Position.w;
vAngle = vec2(cos(angle), sin(angle));
vColor = color;
}
This is how I am creating the material and geometry in the TypeScript:
this.material = new Three.ShaderMaterial({
uniforms: {
diffuseTexture: {
value: new Three.TextureLoader().load(basicParticle)
},
pointMultiplier: {
value: window.innerHeight / (2.0 * Math.tan(0.5 * 60.0 * Math.PI / 180.0))
}
},
vertexShader,
fragmentShader,
blending: Three.MultiplyBlending,
depthTest: true,
depthWrite: false,
transparent: true,
vertexColors: true
});
this.geometry = new Three.BufferGeometry();
new Three.Points(this.geometry, this.material);
What is causing me to get white squares and how can I fix it? Is it the shaders, TypeScript, or both?
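For experimenting, three.js also exposes CustomBlending, which lets you pick the GL blend factors yourself. A minimal sketch of a multiply-style blend that leaves the destination untouched where the particle texture is fully transparent; the factor choice here is an assumption to try out, not a fix verified against this exact setup:
// sketch: multiply-like blending that respects the source alpha
this.material.blending = Three.CustomBlending;
this.material.blendEquation = Three.AddEquation;
this.material.blendSrc = Three.DstColorFactor;          // src * dst
this.material.blendDst = Three.OneMinusSrcAlphaFactor;  // keep dst where src alpha is 0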

WebGL blur shader in Three.js

This is kind of a long shot, but I'm hopelessly lost. I started learning Three.js some time ago and wanted to migrate my project from P5.js (as an exercise, but with the idea of using it in the future). In P5.js I simply made a canvas and applied a shader to it, but in Three.js that doesn't seem to work. So I figured I'd try doing it in shaders.
The idea:
to draw random circles (on transparent background)
blur the circles
use the outcome as a texture.
So far, I've managed to draw the circles (not random yet, but I'm still working on that) and use them as a texture.
JS part:
const geometry = new THREE.PlaneGeometry( 1, 1, 1 );
const material = new THREE.ShaderMaterial( {
uniforms: {
iP: 0,
dl: { value : new THREE.Vector2(.6, 0), },
spots : { value : 5.0 },
offset : { value : new THREE.Vector2(0.5, 0.5) },
radius : { value : .25 },
},
vertexShader: _VS,
fragmentShader: _FS,
transparent: true
});
Vert shader part:
const _VS = `
precision mediump float;
attribute vec3 aPosition;
varying vec2 vTexCoord;
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
`;
Fragment shader part:
// fragment shader
const _FS = `
precision mediump float;
varying vec2 vTexCoord;
varying vec2 vUv;
uniform float spots;
uniform vec2 offset;
uniform float radius;
// uniform sampler2D iP; // canvas to be blurred
uniform vec2 dl;
const float Pi = 6.28318530718;
float rnd(vec3 scale, float seed) {
return fract(sin(dot(gl_FragCoord.xyz + seed, scale)) * 43758.5453 + seed);
}
void main() {
// CIRCLE;
vec4 color1 = vec4(.1, .5, .3, 1.0);
vec4 color2 = vec4(0., 0., 0., 0.01);
vec2 shift = offset;
for (float t = 0.0; t <= spots; t++) {
float p = smoothstep(radius, radius + .0001, length(vUv - shift));
vec4 col = mix(color1, color2, vec4(p));
gl_FragColor += col;
shift.x += .05;
shift.y -= .01;
}
// BLUR
vec4 col = vec4(0.0);
float tt = 0.0;
float off = rnd(vec3(12.9898, 78.233, 151.7182), 0.0);
for (float t = -30.0; t <= 30.0; t++) {
float pc = (t + off - 0.5) / 30.0;
float w = 1.0 - abs(pc);
vec4 spl = texture2D(iP, vTexCoord + dl * pc);
spl.rgb *= spl.a;
col += spl * w;
tt += w;
}
gl_FragColor = col / tt;
gl_FragColor.rgb /= gl_FragColor.a + 0.00001;
}`;
I have an issue with the line vec4 spl = texture2D(iP, vTexCoord + dl * pc);. I don't know how I can use the circles I created as the input that gets sampled into gl_FragColor.
I've spent hours reading and looking for a solution, but I didn't find any help.
I'd truly appreciate any directions or solutions!
Thanks in advance!
You seem to be mixing a lot of variable names between P5.js and Three.js. You should just look at the way Three.js does it, and try to forget the P5.js terminology, because they're not the same. Here's an official example of a simple shader setup; you can look at the source code there.
I've copied that demo below with a much more simplified shader so you can get a feel of how to pass a time uniform from JavaScript, and read it in GLSL:
body, html {
margin: 0;
}
<div id="container"></div>
<!-- Import maps polyfill -->
<!-- Remove this when import maps will be widely supported -->
<script async src="https://unpkg.com/es-module-shims@1.3.6/dist/es-module-shims.js"></script>
<script type="importmap">
{
"imports": {
"three": "https://cdn.jsdelivr.net/npm/three#0.142.0/build/three.module.js",
}
}
</script>
<script type="module">
import * as THREE from "https://cdn.jsdelivr.net/npm/three@0.142.0/build/three.module.js";
let camera, scene, renderer;
let uniforms;
init();
animate();
function init() {
const container = document.getElementById( 'container' );
camera = new THREE.OrthographicCamera( - 1, 1, 1, - 1, 0, 1 );
scene = new THREE.Scene();
const geometry = new THREE.PlaneGeometry( 2, 2 );
uniforms = {
time: { value: 1.0 }
};
const _VS = `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = vec4( position, 1.0 );
}
`;
const _FS = `
varying vec2 vUv;
uniform float time;
void main() {
float blue = sin(time * 5.0 + vUv.x * 10.0) * 0.5 + 0.5;
gl_FragColor = vec4( vUv.x, vUv.y, blue, 1.0 );
}
`;
const material = new THREE.ShaderMaterial( {
uniforms: uniforms,
vertexShader: _VS,
fragmentShader: _FS
} );
const mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
container.appendChild( renderer.domElement );
onWindowResize();
window.addEventListener( 'resize', onWindowResize );
}
function onWindowResize() {
renderer.setSize( window.innerWidth, window.innerHeight );
}
//
function animate() {
requestAnimationFrame( animate );
uniforms[ 'time' ].value = performance.now() / 1000;
renderer.render( scene, camera );
}
</script>
From here you should be able to extrapolate and add other types of uniforms, including float, vec2, sampler2D, etc. Finally, here's a bit more detail on which uniforms, attributes, and naming conventions are used by default when you use ShaderMaterial.
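For example, the uniforms object above could be extended with a vec2 and a texture like this (the names resolution and map, and the file name, are placeholders, not part of the demo):
uniforms = {
  time: { value: 1.0 },
  resolution: { value: new THREE.Vector2(window.innerWidth, window.innerHeight) },
  // read in GLSL as `uniform sampler2D map;`
  map: { value: new THREE.TextureLoader().load('circles.png') }
};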
Every effect (post-process) that you want to apply to your whole scene should be passed through an EffectComposer.
Here is documentation: https://threejs.org/docs/#manual/en/introduction/How-to-use-post-processing.
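A minimal sketch of that setup, assuming the blur shaders that ship with the three.js examples (HorizontalBlurShader and VerticalBlurShader), your existing renderer/scene/camera, and a bundler or import map that resolves the three/examples paths:
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer.js';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass.js';
import { ShaderPass } from 'three/examples/jsm/postprocessing/ShaderPass.js';
import { HorizontalBlurShader } from 'three/examples/jsm/shaders/HorizontalBlurShader.js';
import { VerticalBlurShader } from 'three/examples/jsm/shaders/VerticalBlurShader.js';

const composer = new EffectComposer(renderer);
composer.addPass(new RenderPass(scene, camera));   // draw the scene first
const hBlur = new ShaderPass(HorizontalBlurShader);
hBlur.uniforms.h.value = 1 / window.innerWidth;    // blur step in UV units
composer.addPass(hBlur);
const vBlur = new ShaderPass(VerticalBlurShader);
vBlur.uniforms.v.value = 1 / window.innerHeight;
composer.addPass(vBlur);

// in the animation loop, call composer.render() instead of renderer.render(scene, camera)
composer.render();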
Sometimes you can also apply simple effects by setting CSS rules, such as blur, on the renderer's DOM element, like this:
renderer.domElement.style.filter = `blur(10px)`;
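And for the original goal of feeding the drawn circles into texture2D(iP, ...), a common pattern is to render the circle pass into a THREE.WebGLRenderTarget and use its .texture as the sampler uniform. A rough sketch, where circleScene and blurMaterial are hypothetical names for a circles-only scene and the material holding the blur shader; note the uniform also has to be declared as iP: { value: null } rather than iP: 0:
const rt = new THREE.WebGLRenderTarget(512, 512);
function render() {
  requestAnimationFrame(render);
  renderer.setRenderTarget(rt);            // pass 1: draw the circles offscreen
  renderer.render(circleScene, camera);
  renderer.setRenderTarget(null);          // pass 2: blur, sampling the offscreen texture
  blurMaterial.uniforms.iP.value = rt.texture;
  renderer.render(scene, camera);
}
render();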

Make light independent of the view in a Phong model

I'm trying to implement the Phong shading model, but I've come across something quite strange. When I change the viewing position, the light seems to behave differently, as if it were dependent on the view. For example, if I'm close to the object I only see the effect of the ambient light, while if I move far away from it I start seeing the diffuse contribution.
These are my shaders:
//Vertex Shader
attribute vec4 vPosition;
attribute vec4 vNormal;
varying vec3 N, L, E;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform vec4 lightPosition;
void main()
{
vec3 pos = -(modelViewMatrix * vPosition).xyz;
vec3 light = lightPosition.xyz;
L = normalize(light - pos);
E = -pos;
N = normalize((modelViewMatrix * vNormal).xyz);
gl_Position = projectionMatrix * modelViewMatrix * vPosition;
}
//Fragment Shader
uniform vec4 ambientProduct;
uniform vec4 diffuseProduct;
uniform vec4 specularProduct;
uniform float shininess;
varying vec3 N, L, E;
void main()
{
vec4 fColor;
vec3 H = normalize(L + E);
vec4 ambient = ambientProduct;
float Kd = max(dot(L, N), 0.0);
vec4 diffuse = Kd * diffuseProduct;
float Ks = pow(max(dot(N, H), 0.0), shininess);
vec4 specular = Ks * specularProduct;
if (dot(L, N) < 0.0) {
specular = vec4(0.0, 0.0, 0.0, 1.0);
}
fColor = ambient + diffuse + specular;
fColor.a = 1.0;
gl_FragColor = fColor;
}
What am I doing wrong? How can I make the light behave independently of the viewer position?
Update 1:
After @Rabbid76's answer I edited the vertex shader by adding these lines (as well as passing the separate model and view matrices, but I'll omit that for brevity's sake):
vec3 pos = (modelViewMatrix * vPosition).xyz;
vec3 light = (viewMatrix * lightPosition).xyz;
And I also updated the calculation of the N vector, as the previous way of doing it did not seem to actually give per-fragment shading:
N = normalize(mat3(modelViewMatrix) * vNormal.xyz);
Still, the shading seems to move along with the rotation of the camera. Could this be related to the fact that the light is multiplied by the viewMatrix?
The calculation of the light vector is wrong.
L = normalize(light - pos);
While pos is a position in view space, light is a position in world space; it is the position of the light in the world. So light - pos doesn't make any sense at all. Both vectors have to be expressed in the same reference system.
Transform the position of the light source by the view matrix, before you set it to the uniform lightPosition, to solve the issue.
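For example (a sketch with hypothetical names, using a generic 4x4 helper such as the twgl m4 module that appears further below):
// transform the world-space light position into view space once per frame
const lightPosView = m4.transformPoint(viewMatrix, lightPositionWorld);
gl.uniform4f(lightPositionLoc, lightPosView[0], lightPosView[1], lightPosView[2], 1.0);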
Of course the transformation can also be done in shader code:
uniform mat4 viewMatrix;
uniform mat4 modelViewMatrix;
uniform vec4 lightPosition;
void main()
{
vec3 pos = (modelViewMatrix * vPosition).xyz;
vec3 light = (viewMatrix * lightPosition).xyz;
L = normalize(light - pos);
// ...
}
Further note that the position in view space must not be inverted. It has to be
vec3 pos = (modelViewMatrix * vPosition).xyz;
rather than
vec3 pos = -(modelViewMatrix * vPosition).xyz;
A working snippet in your question is always helpful!
Issues

- The light and the position need to be in the same space. That could be world space or view space, but it has to be the same space for both. The code had the position E in view space but lightPosition in world space.
- You can't multiply a normal by a modelViewMatrix. You need to remove the translation, and you potentially also need to deal with scaling issues. See this article, and the sketch after this list.
- The code is computing values in the vertex shader, so they will be interpolated as they get passed to the fragment shader. That means they will no longer be unit vectors, so you need to re-normalize them.
- In computing the half vector you need to add the two directions. The code was adding L (the direction from the surface to the light) to the view-space position of the surface instead of the direction from the surface to the view.
- In computing the surface-to-light direction, that would be light - pos, but the code was negating pos. Of course you do need -pos for the surface-to-view direction E.
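For the scaling issue mentioned above, one option (a sketch, not something the snippet below needs for its uniform scale) is to build a normal matrix on the CPU as the inverse-transpose of the model-view matrix and pass it as an extra uniform:
// using the same twgl m4 helpers as the snippet below
const normalMatrix = m4.transpose(m4.inverse(modelViewMatrix));
// vertex shader (extra uniform):
//   uniform mat4 normalMatrix;
//   N = mat3(normalMatrix) * vNormal.xyz;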
const gl = document.querySelector('canvas').getContext('webgl');
const m4 = twgl.m4;
const vs = `
attribute vec4 vPosition;
attribute vec4 vNormal;
varying vec3 N, L, E;
uniform mat4 viewMatrix;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform vec4 lightPosition;
void main()
{
vec3 pos = (modelViewMatrix * vPosition).xyz;
vec3 light = (viewMatrix * lightPosition).xyz;
L = light - pos;
E = -pos;
N = mat3(modelViewMatrix) * vNormal.xyz;
gl_Position = projectionMatrix * modelViewMatrix * vPosition;
}
`;
const fs = `
precision highp float;
uniform vec4 ambientProduct;
uniform vec4 diffuseProduct;
uniform vec4 specularProduct;
uniform float shininess;
varying vec3 N, L, E;
void main()
{
vec4 fColor;
vec3 normal = normalize(N);
vec3 surfaceToLightDir = normalize(L);
vec3 surfaceToViewDir = normalize(E);
vec3 H = normalize(surfaceToLightDir + surfaceToViewDir);
vec4 ambient = ambientProduct;
float Kd = max(dot(surfaceToLightDir, normal), 0.0);
vec4 diffuse = Kd * diffuseProduct;
float Ks = pow(max(dot(normal, H), 0.0), shininess);
vec4 specular = Ks * specularProduct;
if (dot(surfaceToLightDir, normal) < 0.0) {
specular = vec4(0.0, 0.0, 0.0, 1.0);
}
fColor = ambient + diffuse + specular;
fColor.a = 1.0;
gl_FragColor = fColor;
}
`;
// compiles shaders, links program, looks up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const vertices = twgl.primitives.createSphereVertices(
2, // radius
8, // subdivision around
6, // subdivisions down
);
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData
const bufferInfo = twgl.createBufferInfoFromArrays(gl, {
vPosition: vertices.position,
vNormal: vertices.normal,
indices: vertices.indices,
});
function render(time) {
time *= 0.001; // convert to seconds
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
const projectionMatrix = m4.perspective(
60 * Math.PI / 180, // field of view
gl.canvas.clientWidth / gl.canvas.clientHeight, // aspect
0.1, // znear
100, // zfar
);
const eye = [
Math.sin(time) * 5,
3,
3 + Math.cos(time) * 5,
];
const target = [0, 2, 3];
const up = [0, 1, 0];
const cameraMatrix = m4.lookAt(eye, target, up);
const viewMatrix = m4.inverse(cameraMatrix);
const worldMatrix = m4.translation([0, 2, 3]);
const modelViewMatrix = m4.multiply(viewMatrix, worldMatrix);
const uniforms = {
viewMatrix,
modelViewMatrix,
projectionMatrix,
lightPosition: [4, 3, 1, 1],
ambientProduct: [0, 0, 0, 1],
diffuseProduct: [1, 1, 1, 1],
specularProduct: [1, 1, 1, 1],
shininess: 50,
};
// calls gl.uniformXXX
twgl.setUniforms(programInfo, uniforms);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
// -- not important to answer --
drawLightAndGrid(uniforms)
requestAnimationFrame(render);
}
requestAnimationFrame(render);
// -- ignore below this line. The only point is to give a frame
// of reference.
let gridBufferInfo;
function drawLightAndGrid(sphereUniforms) {
if (!gridBufferInfo) {
const vPosition = [];
const s = 100;
for (let x = -s; x <= s; x += 2) {
vPosition.push(x, 0, -s);
vPosition.push(x, 0, s);
vPosition.push(-s, 0, x);
vPosition.push( s, 0, x);
}
gridBufferInfo = twgl.createBufferInfoFromArrays(gl, {
vPosition,
vNormal: { value: [0, 0, 0], }
});
}
const worldMatrix = m4.translation(sphereUniforms.lightPosition);
m4.scale(worldMatrix, [0.1, 0.1, 0.1], worldMatrix);
const uniforms = Object.assign({}, sphereUniforms, {
modelViewMatrix: m4.multiply(sphereUniforms.viewMatrix, worldMatrix),
ambientProduct: [1, 0, 0, 1],
diffuseProduct: [0, 0, 0, 0],
specularProduct: [0, 0, 0, 0],
});
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
twgl.setUniforms(programInfo, uniforms);
twgl.drawBufferInfo(gl, bufferInfo);
twgl.setBuffersAndAttributes(gl, programInfo, gridBufferInfo);
twgl.setUniforms(programInfo, {
modelViewMatrix: sphereUniforms.viewMatrix,
ambientProduct: [0, 0, 1, 1],
});
twgl.drawBufferInfo(gl, gridBufferInfo, gl.LINES);
}
canvas { border: 1px solid black }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
Personally I find short, cryptic variable names hard to follow, but that's a personal preference.

Can't manage to make a shader from ShaderFrog run in three.js

I would like to import this shader into my project:
https://shaderfrog.com/app/view/2447
This is my fragment shader:
<script id="fragmentShader" type="x-shader/x-fragment">
#ifdef GL_ES
precision highp float;
precision highp int;
#endif
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
uniform sampler2D texture1;
// Example varyings passed from the vertex shader
varying vec3 vPosition;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
vec3 color = vec3(texture2D(texture1, vUv));
vec3 outcolor=color;
float v = u_time + (vUv.x*0.5 + vUv.y*0.5);
vec2 Uv2 = vec2(color.r+color.b+v,color.g+color.b+v);
outcolor = vec3(texture2D(texture1, Uv2));
gl_FragColor = vec4( outcolor, 1.0 );
}
</script>
And this is my vertex shader:
<script id="vertexShader" type="x-shader/x-vertex">
varying vec3 vPosition;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
vNormal = normal;
vUv = uv;
vPosition = position;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
My shader compiles, but I don't get the same result; all I get is a frozen, highly contrasted image. And I do increment my u_time uniform value inside a requestAnimationFrame callback.
I can't see what I'm doing wrong.
I've simplified your code and made it work with the latest revision (r96).
Pay attention to the settings of the texture: the computed Uv2 coordinates go outside the [0, 1] range, so the texture needs THREE.RepeatWrapping.
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 1, 1000);
camera.position.set(0, 0, 2);
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var tex = new THREE.TextureLoader().load("https://threejs.org/examples/textures/UV_Grid_Sm.jpg");
tex.wrapS = THREE.RepeatWrapping;
tex.wrapT = THREE.RepeatWrapping;
var geo = new THREE.PlaneBufferGeometry(2, 2);
var mat = new THREE.ShaderMaterial({
uniforms:{
time: {value: 0},
texture1: {value: tex}
},
vertexShader:`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
`,
fragmentShader: `
uniform float time;
uniform sampler2D texture1;
varying vec2 vUv;
void main() {
vec3 c = vec3(texture2D(texture1, vUv));
float v = time + (vUv.x*0.5 + vUv.y*0.5);
vec2 Uv2 = vec2(c.r + c.b+v, c.g + c.b + v);
vec3 outcolor = vec3(texture2D(texture1, Uv2));
gl_FragColor = vec4( outcolor, 1.0 );
}
`
});
var plane = new THREE.Mesh(geo, mat);
scene.add(plane);
var clock = new THREE.Clock();
var time = 0;
render();
function render(){
requestAnimationFrame(render);
time += clock.getDelta();
mat.uniforms.time.value = time;
renderer.render(scene, camera);
}
body{
overflow: hidden;
margin: 0;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/96/three.min.js"></script>

Explode tessellation shader: textureCube not drawing the real texture

I want to make an explode effect like this one (http://threejs.org/examples/#webgl_modifier_tessellation), but for a cube with panorama textures (a cube with 6 images).
I transformed the vertex shader to this:
uniform float amplitude;
attribute float displacement;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
vNormal = normal;
vUv = ( 0.5 + amplitude ) * uv + vec2( amplitude );
vec3 newPosition = position + amplitude * normal * vec3( displacement );
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
and the fragment shader to this, by adding a samplerCube and a textureCube() lookup:
varying vec3 vNormal;
varying vec2 vUv;
uniform samplerCube texture1;
void main() {
vec3 light = vec3( 0.5, 0.2, 1.0 );
light = normalize( light );
float dProd = dot( vNormal, light ) * 0.5 + 0.5;
gl_FragColor = vec4( vec3( dProd ), 1.0 );
gl_FragColor = textureCube(texture1, vNormal);
}
But the result doesn't look good: it shows a color, but not the real detailed texture (see the texture: http://pg2.paraplan.io/_pano/cube/1024/nx.jpg).
Can you help me with the shader setup, or maybe you know how to do the explode tessellation without shaders?
DEMO: http://pg2.paraplan.io/_pano/cube/scene.html
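One thing that stands out in the fragment shader above: a cube map is sampled with a direction vector, and on a cube the interpolated normal is constant per face, so textureCube(texture1, vNormal) returns roughly one texel per face, which matches the flat colors you describe. For a panorama cube, the lookup direction is usually the direction from the cube's centre toward the fragment. A sketch, assuming a new varying (here called vPos) set to position in the vertex shader:
varying vec3 vPos;            // = position, assigned in the vertex shader
uniform samplerCube texture1;
void main() {
  gl_FragColor = textureCube(texture1, normalize(vPos));
}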
