Retrieving pixel data from a fragment shader results in unexpected behavior - javascript

I'm currently trying to use a fragment shader to carry some particle position data (and, in the future, modify it). I have no problem sending the data to the shader through a sampler2D texture, but when I try to read the data back, my 20 particles suddenly have the wrong positions. I've printed as many outputs as possible and minimized the code as much as I can, yet I still fail to see where I'm going wrong.
A reproducible, minimized version is available on the p5.js website here.
Here is my sketch.js:
let theShader;
let shaderTexture;
let NUM_PARTICLES = 20;
let particleTexture;
let particles = [];

function preload(){
  theShader = loadShader('basic.vert', 'basic.frag');
}

function setup() {
  createCanvas(400, 400, WEBGL);

  // Initiate Particles
  for(let i = 0; i < NUM_PARTICLES; i++){
    particles.push(createVector(i*20, i*20, 0));
  }

  // Initialize Shader
  shaderTexture = createGraphics(NUM_PARTICLES, 1, WEBGL);
  shaderTexture.noStroke();

  // Create Particle Texture
  particleTexture = createImage(NUM_PARTICLES, 1);

  // Fill Particle Texture
  particleTexture.loadPixels();
  for (let i = 0; i < NUM_PARTICLES; i++) {
    particleTexture.pixels[i*4+0] = map(particles[i].x, 0, width, 0, 255);  // R
    particleTexture.pixels[i*4+1] = map(particles[i].y, 0, height, 0, 255); // G
    particleTexture.pixels[i*4+2] = 0;   // B
    particleTexture.pixels[i*4+3] = 255; // A
  }
  particleTexture.updatePixels();
}

function draw() {
  translate(-width/2, -height/2);
  background(255);

  // Display Particles Before Modification
  for(let i = 0; i < NUM_PARTICLES; i++){
    circle(particles[i].x, particles[i].y, 10); // draw circle at particle location
  }

  // Apply Texture
  shaderTexture.shader(theShader);               // set shader
  theShader.setUniform('text', particleTexture); // send particleTexture to shader
  shaderTexture.rect(0, 0, NUM_PARTICLES, 1);    // set rect to receive shader output

  // Print Shader Output
  for(let i = 0; i < NUM_PARTICLES; i++){
    let newPos = shaderTexture.get(i, 0);
    print(newPos);
  }

  // Update and Display Particles
  for(let i = 0; i < NUM_PARTICLES; i++){
    let newPos = shaderTexture.get(i, 0);
    particles[i].x = map(newPos[0], 0, 255, 0, width);
    particles[i].y = map(newPos[1], 0, 255, 0, height);
    fill(255, 0, 0);
    circle(particles[i].x, particles[i].y, 10);
  }
  noLoop();
}
And here is my fragment shader, which should not be modifying anything:
#ifdef GL_ES
precision highp float;
#endif

uniform sampler2D text;

void main() {
  vec2 particle = texture2D(text, vec2(gl_FragCoord.x, gl_FragCoord.y)).xy;
  gl_FragColor = vec4(particle.x, particle.y, 0.0, 1.0); // R,G,B,A
}
And here is my vertex shader, which is the default one:
#ifdef GL_ES
precision highp float;
#endif

attribute vec3 aPosition;

void main() {
  vec4 positionVec4 = vec4(aPosition, 1.0);
  positionVec4.xy = positionVec4.xy * 2.0 - 1.0;
  gl_Position = positionVec4;
}

When looking up a texture with texture2D, the texture coordinates must be specified in the range [0.0, 1.0]: (0, 0) is the bottom left and (1, 1) is the top right. However, gl_FragCoord.xy contains window coordinates, where the bottom-left pixel center is at (0.5, 0.5) and the top-right pixel center is at (width-0.5, height-0.5).
Hence you need to divide gl_FragCoord.xy by the size of the viewport. Since you draw on a rectangle of size NUM_PARTICLES x 1 (width == NUM_PARTICLES, height == 1), you must divide gl_FragCoord.x by NUM_PARTICLES. Change
vec2 particle = texture2D(text, vec2(gl_FragCoord.x, gl_FragCoord.y)).xy;
to
vec2 particle = texture2D(text, vec2(gl_FragCoord.x/20.0, 0.0)).xy;
Alternatively you can add a size uniform to the fragment shader and divide gl_FragCoord.xy by size:
#ifdef GL_ES
precision highp float;
#endif

uniform sampler2D text;
uniform vec2 size;

void main() {
  vec2 particle = texture2D(text, gl_FragCoord.xy / size).xy;
  gl_FragColor = vec4(particle.x, particle.y, 0.0, 1.0); // R,G,B,A
}
Set the value of the size uniform to [NUM_PARTICLES, 1]:
// Apply Texture
shaderTexture.shader(theShader); // set shader
theShader.setUniform('text', particleTexture); // send particleTexture to shader
theShader.setUniform('size', [NUM_PARTICLES, 1]);
shaderTexture.rect(0, 0, NUM_PARTICLES, 1); // set rect to receive shader output
If you are using OpenGL ES Shading Language 3.00, you have the option to use texelFetch to look up the texture with integer pixel coordinates, or to get the size of the texture using textureSize.
Unfortunately p5.js (createCanvas()) doesn't seem to provide a WebGL 2.0 context, so this is not an option here.
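For illustration only, a GLSL ES 3.00 version of the fragment shader using texelFetch might look like the sketch below; it is not usable with the p5.js setup above, since that only provides a WebGL 1.0 context:
#version 300 es
precision highp float;

uniform sampler2D text;
out vec4 fragColor;

void main() {
  // texelFetch takes integer pixel coordinates, so no normalization is needed
  ivec2 texel = ivec2(gl_FragCoord.xy); // (x, 0) for a NUM_PARTICLES x 1 texture
  vec2 particle = texelFetch(text, texel, 0).xy;
  fragColor = vec4(particle, 0.0, 1.0);
}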

Related

Unintended Behavior of Vertex Color

I'm trying to create a per-vertex color feature using a varying, which is intended to produce a gradient between the vertices, like this
However, after setting a color combination like this
...
this.vertices[0].setColor([255,0,0,1]); // red
this.vertices[1].setColor([255,0,0,1]); // red
this.vertices[2].setColor([255,0,0,1]); // red
this.vertices[3].setColor([255,255,0,1]); // yellow
...
The result is like this
It is supposed to be a yellow gradient only on the bottom-left side of the shape, but instead a half triangle was created.
Here is my shader source code
const vSource = `
  attribute vec4 vPosition;
  attribute vec4 vColor;
  varying vec4 fColor;

  void main() {
    gl_Position = vPosition;
    fColor = vColor;
  }
`;

const fSource = `
  precision mediump float;
  varying vec4 fColor;

  void main() {
    gl_FragColor = fColor;
  }
`;
And here is the implementation of the render function
render = (gl, program, vBuffer, cBuffer) => {
  const vertices = [];
  const colors = [];
  for (let j = 0; j < this.vertices.length; j++) {
    vertices.push(this.vertices[j].coordinate);
    colors.push(this.vertices[j].color);
  }

  gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices), gl.STATIC_DRAW);
  const vPosition = gl.getAttribLocation(program, "vPosition");
  gl.vertexAttribPointer(vPosition, 2, gl.FLOAT, false, 0, 0);
  gl.enableVertexAttribArray(vPosition);

  gl.bindBuffer(gl.ARRAY_BUFFER, cBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, flatten(colors), gl.STATIC_DRAW);
  const vColor = gl.getAttribLocation(program, "vColor");
  gl.vertexAttribPointer(vColor, 4, gl.FLOAT, false, 0, 0);
  gl.enableVertexAttribArray(vColor);

  if (this.shape == "line") {
    gl.drawArrays(gl.LINES, 0, vertices.length);
  } else {
    gl.drawArrays(gl.TRIANGLE_FAN, 0, vertices.length); // in this case, it is going to be a TRIANGLE_FAN primitive
  }
};
I would be thankful if you could spot anything odd that might be causing the bug.
P.S.: I have already implemented shape creation (polygons, transformations, and so on) and the coordinate-related behavior seems fine; I just can't figure out what is causing the vertex colors to come out like this.
The values of the color channels are floating-point numbers in the range [0.0, 1.0], not integers in the range [0, 255], e.g.:
this.vertices[0].setColor([1, 0, 0, 1]); // red
this.vertices[1].setColor([0, 1, 0, 1]); // green
this.vertices[2].setColor([0, 0, 1, 1]); // blue
this.vertices[3].setColor([1, 1, 0, 1]); // yellow
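If you would rather keep authoring colors as 0-255 values, a minimal sketch of normalizing them before they reach the buffer (assuming setColor simply stores the array it is given, and rgba255 is a hypothetical helper):
// hypothetical helper: map 0-255 RGB plus a 0-1 alpha into the [0.0, 1.0] range WebGL expects
const rgba255 = (r, g, b, a = 1) => [r / 255, g / 255, b / 255, a];

this.vertices[0].setColor(rgba255(255, 0, 0));   // red
this.vertices[3].setColor(rgba255(255, 255, 0)); // yellow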

Three.js: Modify the UV of my texture inside a custom ShaderMaterial

I have a plane geometry and I'm creating a custom ShaderMaterial for it. It will receive some textures as uniforms. I'd like the textures to perfectly cover my plane (like the background-size: cover CSS property).
I managed to do it with a utility function when I used my textures with a MeshBasicMaterial:
cover( texture, aspect ) {
  var imageAspect = texture.image.width / texture.image.height;
  if ( aspect < imageAspect ) {
    texture.matrix.setUvTransform( 0, 0, aspect / imageAspect, 1, 0, 0.5, 0.5 );
  } else {
    texture.matrix.setUvTransform( 0, 0, 1, imageAspect / aspect, 0, 0.5, 0.5 );
  }
}
But unfortunately, since I'm using a ShaderMaterial, my cover function no longer applies. Am I forced to do it inside my fragment shader? If so, how can I reproduce this behavior?
Here's my code:
const vertexShader = `
  precision highp float;

  uniform mat3 uUvTransform;
  varying vec2 vUv;

  void main() {
    vUv = ( uUvTransform * vec3( uv, 1 ) ).xy;
    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
  }`;

const fragmentShader = `
  precision highp float;

  uniform sampler2D uText1;
  varying vec2 vUv;

  void main() {
    vec2 xy = vUv;
    vec4 color = texture2D(uText1, xy);
    gl_FragColor = color;
  }`;
And here's my current result:
Thanks a lot.
You could simply use a custom uniform, e.g.:
uniform sampler2D uText1;
uniform vec2 uUvScale;
varying vec2 vUv;

void main() {
  vec2 uv = (vUv - 0.5) * uUvScale + 0.5;
  gl_FragColor = texture2D(uText1, uv);
}
And:
var imageAspect = texture.image.width / texture.image.height;
if ( aspect < imageAspect ) {
  material.uniforms.uUvScale.value.set( aspect / imageAspect, 1 );
} else {
  material.uniforms.uUvScale.value.set( 1, imageAspect / aspect );
}
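For reference, a minimal sketch of how the material might be wired up, assuming a vertex shader that simply forwards vUv = uv (uText1 and uUvScale match the names used above):
const material = new THREE.ShaderMaterial({
  vertexShader: `
    varying vec2 vUv;
    void main() {
      vUv = uv;
      gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
    }`,
  fragmentShader, // the fragment shader shown above
  uniforms: {
    uText1: { value: texture },
    uUvScale: { value: new THREE.Vector2( 1, 1 ) } // updated by the aspect logic above
  }
});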
The way Three.js handles texture transformations like .offset, .repeat, .rotation, .center is via a Matrix3 that gets passed as a uniform into the vertex shader. The vertex shader performs the matrix multiplication, then passes the modified UVs as a varying to the fragment shader.
You can see that uniform being declared in the uv_pars_vertex.glsl.js file
You can see the transform being applied in the uv_vertex.glsl.js file
You could copy those lines of GLSL code to your ShaderMaterial's vertex shader, and I think the texture properties will come through in the Matrix3 automatically. However, if for some reason it doesn't, you could recreate the Matrix3 by copying it from the source and passing it as a uniform manually. I don't know what your utility function looks like, so it's hard to tell how you're achieving the desired scaling.
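For orientation, in older three.js releases those two shader chunks boil down to roughly the following lines (exact uniform names can differ between versions, so treat this as a sketch rather than a verbatim copy):
// from uv_pars_vertex.glsl.js (declarations)
varying vec2 vUv;
uniform mat3 uvTransform;

// from uv_vertex.glsl.js (inside main())
vUv = ( uvTransform * vec3( uv, 1 ) ).xy;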

Creating an efficient early instance clip in a WebGL2 vertex shader

Discarding instances in the vertex shader
I am using instanced geometry to display content with WebGL2. Each instance has a color attribute, which for some instances may have an alpha value of zero.
Rather than pass such instances on to the fragment shader to be discarded, I check the alpha in the vertex shader. If it is zero, I output each vertex as vec4(-2) to put it outside the clip volume, or at worst have it render as a 1-pixel point.
I cannot find information on how this is handled by the rendering pipeline.
Is this the best strategy to remove instances from the pipeline?
The alternative is to remove the instances from the buffer, which in JS is a CPU-intensive operation when dealing with thousands of instances.
The shaders
const vertexSrc = `#version 300 es
#define alphaCut 0.0

in vec2 verts;
in vec2 pos;
in vec2 scale;
in vec2 offset;
in float rotate;
in float zIdx;     // z index for zBuf clip only
in vec4 color;     // RGBA; color.a == 0.0 means remove, do not render
in uint spriteIdx;

uniform vec4 sheetLayout[128];
uniform vec2 sheetSize;
uniform mat2 view;
uniform vec2 origin;

out vec2 spr;
out vec4 col;

void main() {
  if (color.a <= alphaCut) {
    gl_Position = vec4(-2); // put this instance outside clip
    return;
  }
  col = color;
  vec4 sprite = sheetLayout[spriteIdx];
  spr = sprite.xy + verts * sprite.zw / sheetSize;
  vec2 loc = (verts - offset) * scale * sprite.zw;
  float xdx = cos(rotate);
  float xdy = sin(rotate);
  loc = view * (vec2(loc.x * xdx - loc.y * xdy, loc.x * xdy + loc.y * xdx) + pos - origin);
  gl_Position = vec4(loc, zIdx, 1);
}`;
const fragmentSrc = `#version 300 es
precision mediump float;

uniform sampler2D tex;

#define alphaCut 0.0

in vec2 spr;
in vec4 col;
out vec4 pixel;

void main() {
  pixel = texture(tex, spr) * col;
  if (pixel.a <= alphaCut) { discard; }
}`;
As pointed out in the question's comments and the deleted answer, moving the vertex outside clip space requires gl_Position = vec4(vec3(-2), 1).
Setting gl_Position = vec4(-2) will put the vertex at vec3(1) (top right on the far plane). This is inside the clip volume, and thus the instance geometry will still end up in the fragment shader.
But why?
Perspective division
When the vertex leaves the vertex shader as gl_Position, it is a 4D vector. To clip the vertex, the resulting 3D position must be outside the 3D clip space.
The normalization of the vertex from 4D to 3D is called perspective division and is performed automatically by dividing the vector's x, y and z components by its w component.
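A quick check of both cases (after perspective division, the result must lie inside the [-1, 1] NDC cube to be kept):
gl_Position = vec4(-2.0)           // NDC = (-2/-2, -2/-2, -2/-2) = (1, 1, 1) -> still inside, not clipped
gl_Position = vec4(vec3(2.0), 1.0) // NDC = ( 2/1,   2/1,   2/1 ) = (2, 2, 2) -> outside, clipped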
Thus, to correctly clip the instance geometry and ensure it does not reach the fragment shader:
#define alphaCut 0.1
//...

void main() {
  if (color.a <= alphaCut) {
    gl_Position = vec4(vec3(2), 1);
    return;
  }
  //... rest of code

How to get the count of pixels matching some condition in GLSL/WebGL?

I am trying to count the number of pixels whose value is below 0.89.
Since variables in WebGL shaders don't persist between invocations, I tried a hack where I compute the count in the last invocation, i.e. when texCoord is (1.0, 1.0).
Currently I am trying the code below. Is this correct, and is there a better approach for this?
void main() {
  //gl_FragColor = vec4(texture2D(u_texture, vec2(mytexcoord.x, mytexcoord.y)).rgb, 1.0);
  vec2 uv = mytexcoord;
  float ins = texture2D(u_texture, vec2(uv.x, 1.0 - uv.y)).r;
  float neglectIntensityValue = 0.89453125;
  float blueVal = 0.00000000;
  vec2 onePixel = vec2(1.0, 1.0) / u_resolution;
  int pixelCount = 0;
  if (vec2(1.0) == mytexcoord) {
    // last iteration
    // loop through all the pixels to know the count
    const int totalPixels = 100000; // int(u_resolution.x * u_resolution.y);
    for (int i = 0; i < totalPixels; i++) {
      vec2 uv = vec2((float(i)) / 100000.0, 0.5);
      vec4 colorData = vec4(texture2D(u_texture, uv).rgb, 1.0);
      if (colorData.r < neglectIntensityValue) {
        pixelCount++;
      }
    }
  }
  if (pixelCount > 1000) {
    gl_FragColor = vec4(ins, ins, ins, 1.0);
  } else {
    ins = 1.0 - ins;
    gl_FragColor = vec4(ins, ins, ins, 1.0);
  }
}
There's no way to do this more efficiently using only the fragment shader. Your pixel shader may even crash on large textures, as it's not intended to run for that long. You could randomly sample a subset of the pixels as an approximation, if that is sufficient.
For an efficient implementation, you could have the shader output a 1 or 0 into a render texture and then aggregate that with log(n) passes of a different pixel shader, summing 2x2 pixels (or 4x4, or more) into 1 pixel in each pass. Obviously that would be quite a bit more complex to implement.
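A minimal sketch of what one such 2x2 reduction pass could look like in GLSL ES 1.00, assuming a float render target (so sums above 1.0 aren't clamped), a source texture u_prev holding the previous level, its pixel size u_prevSize, and a v_texcoord varying covering the half-sized destination (all names here are assumptions for illustration):
precision mediump float;

uniform sampler2D u_prev;   // previous level (initially the 0/1 mask)
uniform vec2 u_prevSize;    // size of u_prev in pixels
varying vec2 v_texcoord;    // 0..1 across the half-sized destination

void main() {
  // each destination pixel sums a 2x2 block of source pixels
  vec2 srcPixel = floor(v_texcoord * u_prevSize * 0.5) * 2.0;
  vec2 onePixel = 1.0 / u_prevSize;
  vec2 base = (srcPixel + 0.5) * onePixel;
  float sum = texture2D(u_prev, base).r
            + texture2D(u_prev, base + vec2(onePixel.x, 0.0)).r
            + texture2D(u_prev, base + vec2(0.0, onePixel.y)).r
            + texture2D(u_prev, base + onePixel).r;
  gl_FragColor = vec4(sum, 0.0, 0.0, 1.0);
}
After enough passes the remaining 1x1 texture holds the total count, which can then be read back to JavaScript.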

Three.js Verlet Cloth Simulation on GPU: Can't follow my logic for finding the bug

I have a problem with the logic I am trying to implement using Three.js and the GPUComputationRenderer by yomboprime.
(https://github.com/yomboprime/GPGPU-threejs-demos/blob/gh-pages/js/GPUComputationRenderer.js)
I want to make a simple Verlet cloth simulation. Here is the logic I have already been able to implement (short version):
1) Position fragment shader: this shader takes the old and current position textures and computes the new position like this:
vec3 position = texture2D( texturePosition, uv ).xyz;
vec3 oldPosition = texture2D( textureOldPosition, uv ).xyz;
position = ( position * 2.0 - oldPosition + acceleration * delta * delta );
vec3 t = checkConstraints( position );
position += t;
gl_FragColor = vec4( position, 1 );
2) Old-position shader: this shader just takes the current position and saves it for the next step.
vec3 position = texture2D( texturePosition, uv ).xyz;
gl_FragColor = vec4(position,1);
This works fine, but with that pattern it's not possible to calculate the constraints more than once per step, because each vertex is processed separately and cannot see the position changes that other pixels would have made in a first constraint iteration.
What I am trying to do is to separate the constraints from the Verlet integration. At the moment it looks something like this:
1) Position-Shader (texturePosition)
vec3 position = texture2D( textureConstraints, uv ).xyz;
vec3 oldPosition = texture2D( textureOldPosition, uv ).xyz;
position = ( position * 2.0 - oldPosition + acceleration * delta * delta );
gl_FragColor = vec4( position, 1 );
2) Constraint-Shader (textureConstraints)
vec3 position = texture2D( texturePosition, uv ).xyz;
vec3 t = checkConstraints( position );
position += t;
gl_FragColor = vec4( position, 1 );
3) Old-Position-Shader (textureOldPosition)
vec3 position = texture2D( textureConstraints, uv ).xyz;
gl_FragColor = vec4(position,1);
This logic is not working, even if I don't calculate any constraints at all and just pass the values through unchanged. As soon as some acceleration is added in the position shader, the position values explode into nowhere.
What am I doing wrong?
This example is not Verlet cloth, but I think the basic premise may help you. I have a fiddle that uses the GPUComputationRenderer to do some spring physics on a point cloud. I think you could adapt it to your needs.
What you need is more information. You'll need fixed references to the cloth's original shape (as if it were a flat board) as well as the forces currently being exerted on each of those points (gravity + wind + structural integrity or whatever else), which together give you that point's current position. Those references to the original shape, in combination with the forces, are what will give your cloth a memory instead of flinging apart as it has been.
Here, for example, is my spring physics shader which the GPUComputationRenderer uses to compute the point positions in my visualization. The tOffsets in this case are the coordinates that give the cloud a permanent memory of its original shape - they never change. It is a DataTexture I add to the uniforms at the beginning of the program. Various constants like the mass, springConstant, gravity, and damping also remain consistent and live in the shader. tPositions are the vec4 coords that change (two of the coords record current position, the other two record current velocity):
<script id="position_fragment_shader" type="x-shader/x-fragment">
  // This shader handles only the math to move the various points. Adding the sprites and point opacity comes in the following shader.
  uniform sampler2D tOffsets;
  uniform float uTime;
  varying vec2 vUv;

  float hash(float n) { return fract(sin(n) * 1e4); }

  float noise(float x) {
    float i = floor(x);
    float f = fract(x);
    float u = f * f * (3.0 - 2.0 * f);
    return mix(hash(i), hash(i + 1.0), u);
  }

  void main() {
    vec2 uv = gl_FragCoord.xy / resolution.xy;
    float damping = 0.98;

    vec4 nowPos = texture2D( tPositions, uv ).xyzw;
    vec4 offsets = texture2D( tOffsets, uv ).xyzw;
    vec2 velocity = vec2(nowPos.z, nowPos.w);

    float anchorHeight = 100.0;
    float yAnchor = anchorHeight;
    vec2 anchor = vec2( -(uTime * 50.0) + offsets.x, yAnchor + (noise(uTime) * 30.0) );

    // Newton's law: F = M * A
    float mass = 24.0;
    vec2 acceleration = vec2(0.0, 0.0);

    // 1. apply gravity's force:
    vec2 gravity = vec2(0.0, 2.0);
    gravity /= mass;
    acceleration += gravity;

    // 2. apply the spring force
    float restLength = yAnchor - offsets.y;
    float springConstant = 0.2;

    // Vector pointing from anchor to point position
    vec2 springForce = vec2(nowPos.x - anchor.x, nowPos.y - anchor.y);
    // length of the vector
    float distance = length( springForce );
    // stretch is the difference between the current distance and restLength
    float stretch = distance - restLength;

    // Calculate springForce according to Hooke's Law
    springForce = normalize(springForce);
    springForce *= (springConstant * stretch);
    springForce /= mass;
    acceleration += springForce;

    velocity += acceleration;
    velocity *= damping;

    vec2 newPosition = vec2(nowPos.x - velocity.x, nowPos.y - velocity.y);

    // Write new position out
    gl_FragColor = vec4(newPosition.x, newPosition.y, velocity.x, velocity.y);
  }
</script>
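For context, a rough sketch of how a shader like this and the fixed tOffsets DataTexture might be wired into yomboprime's GPUComputationRenderer; the texture size SIZE, the points material, and the exact data layout are assumptions for illustration:
// one texel per point
const SIZE = 64;
const gpuCompute = new GPUComputationRenderer(SIZE, SIZE, renderer);

// tPositions: the texture the position shader reads and writes every frame
const positionTexture = gpuCompute.createTexture();
// ...fill positionTexture.image.data with the initial x, y, vx, vy of each point...
const positionVariable = gpuCompute.addVariable(
  'tPositions',
  document.getElementById('position_fragment_shader').textContent,
  positionTexture
);
gpuCompute.setVariableDependencies(positionVariable, [positionVariable]);

// tOffsets: a fixed DataTexture recording the original shape; it is never written to
const offsetsData = new Float32Array(SIZE * SIZE * 4);
// ...fill offsetsData with the rest coordinates of each point...
const tOffsets = new THREE.DataTexture(offsetsData, SIZE, SIZE, THREE.RGBAFormat, THREE.FloatType);
tOffsets.needsUpdate = true;

positionVariable.material.uniforms.tOffsets = { value: tOffsets };
positionVariable.material.uniforms.uTime = { value: 0.0 };

const error = gpuCompute.init();
if (error !== null) console.error(error);

// each frame:
// positionVariable.material.uniforms.uTime.value = clock.getElapsedTime();
// gpuCompute.compute();
// pointsMaterial.uniforms.tPositions.value =
//   gpuCompute.getCurrentRenderTarget(positionVariable).texture;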
