How to use a depth texture in WebGL2 - JavaScript

When you only need the color part of the frame, you can store the depth in a renderbuffer.
This works fine for me on both WebGL1 and WebGL2.
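For context, a minimal sketch of that renderbuffer setup (the width, height, and an already-created framebuffer bound to gl.FRAMEBUFFER are assumptions here, not code from the question):
const depthBuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthBuffer);
// DEPTH_COMPONENT16 works as a renderbuffer format in both WebGL1 and WebGL2
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, width, height);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, depthBuffer);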
When you also need to use the depth values you can't use a renderbuffer; you have to store them in a texture.
In WebGL1 you need to enable an extension to support that.
This also works fine for me in WebGL1.
const gl = canvas.getContext('webgl');
const depthTextureExtension = gl.getExtension('WEBGL_depth_texture');
if (!depthTextureExtension) {
alert('Depth textures not supported');
}
This feature should be available by default in WebGL2.
Yet when I replace the code above with this:
const gl = canvas.getContext('webgl2');
My code fails to render and I cannot find any explanation why.
this.texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
this.depth = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, this.depth);
checkGLErrors(gl); // OK
gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, width, height, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null);
checkGLErrors(gl); // for webgl2: 1281 INVALID_VALUE
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
this.framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this.texture, 0);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, this.depth, 0);

In WebGL2, DEPTH_COMPONENT is not a valid internal format. Use DEPTH_COMPONENT16, DEPTH_COMPONENT24 or DEPTH_COMPONENT32F instead.
Example:
const level = 0;
const internalFormat = gl.DEPTH_COMPONENT24;
const border = 0;
const format = gl.DEPTH_COMPONENT;
const type = gl.UNSIGNED_INT;
const data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
targetTextureWidth, targetTextureHeight, border,
format, type, data);
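With a sized internal format like DEPTH_COMPONENT24, the attachment code from the question should work unchanged; it is still worth verifying completeness afterwards. A sketch, assuming the framebuffer from the question is still bound:
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
  console.error('Framebuffer is incomplete');
}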

Related

WebGL: INVALID_VALUE: texImage2D: invalid internalformat - depthTexture in webgl2

const depthTextures = gl => {
const depthTexture = gl.createTexture();
const depthTextureSize = 512;
gl.bindTexture(gl.TEXTURE_2D, depthTexture);
gl.texImage2D(gl.TEXTURE_2D, // target
0, // mip level
gl.DEPTH_COMPONENT, // internal format
depthTextureSize, // width
depthTextureSize, // height
0, // border
gl.DEPTH_COMPONENT, // format
gl.UNSIGNED_INT, // type
null); // data
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const depthFramebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, depthFramebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
gl.DEPTH_ATTACHMENT, // attachment point
gl.TEXTURE_2D, // texture target
depthTexture, // texture
0); // mip level
// create a color texture of the same size as the depth texture
// see the article for why this is needed
const unusedTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, unusedTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, depthTextureSize, depthTextureSize, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// attach it to the framebuffer
gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
gl.COLOR_ATTACHMENT0, // attachment point
gl.TEXTURE_2D, // texture target
unusedTexture, // texture
0); // mip level
return [depthFramebuffer, unusedTexture];
};
I found this note:
Note: This extension is only available to WebGL1 contexts. In WebGL2,
the functionality of this extension is available on the WebGL2 context
by default. The constant in WebGL2 is gl.UNSIGNED_INT_24_8.
I changed DEPTH_COMPONENT to RGBA but there is still no attached framebuffer...
With other combinations I get:
gl.texImage2D(
gl.TEXTURE_2D, // target
0, // mip level
gl.RGBA, // internal format
depthTextureSize, // width
depthTextureSize, // height
0, // border
gl.RGBA, // format
gl.UNSIGNED_INT_24_8, // type
null); // data
GL_INVALID_OPERATION: Invalid combination of format, type and internalFormat.
GL_INVALID_OPERATION: Only array uniforms may have count > 1.
Any suggestions?
This is the source I want to implement in my own existing glMatrix project...
The internal format of a depth texture cannot be RGBA; it must be one of the sized depth internal formats, e.g. gl.DEPTH_COMPONENT24 or gl.DEPTH_COMPONENT32F. If the format is gl.DEPTH_COMPONENT, the type must be FLOAT or one of the unsigned integral types. The internal format, format and type must form one of the valid combinations (see OpenGL ES 3.0 glTexImage2D). Valid textures for a gl.DEPTH_ATTACHMENT are therefore, e.g.:
gl.texImage2D(
    gl.TEXTURE_2D, 0,
    gl.DEPTH_COMPONENT24,
    depthTextureSize, depthTextureSize, 0,
    gl.DEPTH_COMPONENT, gl.UNSIGNED_INT,
    null);
gl.texImage2D(
    gl.TEXTURE_2D, 0,
    gl.DEPTH_COMPONENT32F,
    depthTextureSize, depthTextureSize, 0,
    gl.DEPTH_COMPONENT, gl.FLOAT,
    null);
Alternatively, you can use a combined depth and stencil texture (in this case it is attached to gl.DEPTH_STENCIL_ATTACHMENT instead of gl.DEPTH_ATTACHMENT), e.g.:
gl.texImage2D(
    gl.TEXTURE_2D, 0,
    gl.DEPTH24_STENCIL8,
    depthTextureSize, depthTextureSize, 0,
    gl.DEPTH_STENCIL, gl.UNSIGNED_INT_24_8,
    null);
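A combined texture like this is attached to the depth-stencil attachment point rather than the depth attachment point; a sketch, assuming depthTexture holds the texture created above:
gl.framebufferTexture2D(
    gl.FRAMEBUFFER,
    gl.DEPTH_STENCIL_ATTACHMENT, // instead of gl.DEPTH_ATTACHMENT
    gl.TEXTURE_2D,
    depthTexture,
    0);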

WebGL2: incomplete framebuffer

I'm porting a C++/OpenGL application into WebGL and I'm trying to create and configure a framebuffer.
The framebuffer has 3 textures we can write into, of types vec2, vec2, and uint (gl.RG32F, gl.RG32F, gl.R32UI).
Here is how I initialize the framebuffer:
var gbuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, gbuffer);
var z0_texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, z0_texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RG32F, output_canvas.width, output_canvas.height, 0, gl.RG, gl.FLOAT, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, z0_texture, 0);
var zn_texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, zn_texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RG32F, output_canvas.width, output_canvas.height, 0, gl.RG, gl.FLOAT, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT1, gl.TEXTURE_2D, zn_texture, 0);
var n_texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, n_texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.R32UI, output_canvas.width, output_canvas.height, 0, gl.RED_INTEGER, gl.UNSIGNED_INT, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT2, gl.TEXTURE_2D, n_texture, 0);
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.COLOR_ATTACHMENT1, gl.COLOR_ATTACHMENT2]);
The framebuffer is incomplete: gl.checkFramebufferStatus(gl.FRAMEBUFFER) returns gl.FRAMEBUFFER_INCOMPLETE_ATTACHMENT and shows a warning: Framebuffer not complete. (status: 0x8cd6) COLOR_ATTACHMENT1: Attachment has an effective format of RG32F, which is not renderable.
Any idea on what's wrong with my code?
I don't want to render these textures. I am using them in post-processing. That's what I did in C++ but can't do the same in WebGL.
No. You cannot do the same in WebGL.
The error message "Attachment has an effective format of RG32F, which is not renderable" means that gl.RG32F is not a renderable format, therefore you cannot use it for a framebuffer attachment. You must use a color renderable format. See OpenGL ES 3.0 Specification - Table 3.13 (WebGL 2.0 conforms closely to OpenGL ES 3.0).
You can't do the same thing in WebGL as you can in desktop OpenGL. In desktop OpenGL RG32F is color renderable. However, WebGL is based on OpenGL ES, and in OpenGL ES RG32F is not a color renderable format. "Color renderable" does not mean that you can render (draw) these textures; it means that you can render into a texture with this format.
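In WebGL2 there is, however, an optional extension that makes float formats such as gl.RG32F color renderable where the hardware supports it. A minimal sketch of the feature check (if the extension is present, the attachments above can stay as they are):
const ext = gl.getExtension('EXT_color_buffer_float');
if (!ext) {
  // Float color attachments are not supported on this GPU/driver;
  // fall back to a renderable format (e.g. gl.RGBA8) or pack the data differently.
}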

Texture Coordinates are slightly off when rendering to FBO texture

I'm working on a WebGL OpenGL ES project that renders my scene to a texture and then uses that texture to render to the window.
I noticed that this causes an odd shimmering on the edges of textures. When I revert the project back to rendering directly to the window the shimmering goes away.
The FBO is set to the same size as the window and the texture is drawn to the window pixel for pixel.
What is causing this to happen? Is it fixable? I am using GL_NEAREST on the FBO texture and GL_LINEAR on all other textures. I'd rather not use GL_CLAMP_TO_EDGE as that would cause other problems.
Framebuffer creation
/* Returns true if a framebuffer object is created */
Display.prototype.createFramebuffer = function(name) {
var gl = this.gl; // Sanity Save
var size = 1024;
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
fb.width = size;
fb.height = size;
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, fb.width, fb.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
var rb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, fb.width, fb.height);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, rb);
if(gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) { return false; }
this.fbo[name] = {fb: fb, rb: rb, tex: tex};
gl.bindTexture(gl.TEXTURE_2D, null);
gl.bindRenderbuffer(gl.RENDERBUFFER, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
return true;
};
Before drawing scene
/* Draw Geometry */
gl.bindFramebuffer(gl.FRAMEBUFFER, this.fbo.world.fb); // Enable world framebuffer
gl.viewport(0, 0, this.window.width, this.window.height); // Resize to canvas
gl.clearColor(0.5, 0.5, 0.5, 1.0); // Opaque grey backdrop
After a push in the right direction (and plenty of trial and error) I figured out the problem.
This happens because hardware anti-aliasing is disabled when rendering to an FBO texture. To fix it you need to implement your own AA.
To fix this for my project I attempted a crude MSAA post shader in combination with upscaling my FBO render to 2x the window resolution and that reduced the shimmering to a nearly unnoticeable level.
FXAA might be another good solution but I couldn't get it to work for me personally.
If there are some better ways to go about this let me know!
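If a WebGL2 context is available, one alternative is rendering into a multisampled renderbuffer and resolving it into the FBO texture with blitFramebuffer. A sketch, assuming the fb object from createFramebuffer above and a sample count the driver supports:
const samples = Math.min(4, gl.getParameter(gl.MAX_SAMPLES));
const msaaRb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, msaaRb);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER, samples, gl.RGBA8, fb.width, fb.height);
const msaaFb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, msaaFb);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, msaaRb);
// ... draw the scene into msaaFb here ...
// Resolve the samples into the framebuffer that holds the texture
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, msaaFb);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb);
gl.blitFramebuffer(0, 0, fb.width, fb.height, 0, 0, fb.width, fb.height, gl.COLOR_BUFFER_BIT, gl.NEAREST);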

How can I use WebGL to create an RGBA texture based off of floating points?

I have a TIFF made up of floating points, not RGBA values, so it shows up as transparent. I used C++ to get the float values, and now I have a matrix of these values. How can I convert them to RGBA using WebGL, then make a texture out of them?
To load floating point values into a texture in WebGL you have to check for and enable floating point textures
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
// Sorry, your browser/GPU/driver doesn't support floating point textures
}
After that you can upload floating point data
var data = new Float32Array(width * height * 4); // 4 floats (RGBA) per pixel
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, data);
But you can't filter that texture so you have to set it to nearest
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
If you want to be able to filter you need to check for and enable that too
var ext = gl.getExtension("OES_texture_float_linear");
if (!ext) {
// sorry, can't filter floating point textures
}
Rendering to a floating point texture is also an optional feature (using a floating point texture as a framebuffer attachment). For that you'd attach the texture and then check whether it works:
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
// sorry, you can't render to a floating point texture
}
Admittedly I haven't really tried this directly with floating point values for a texture, but I have done it with Uint8 values to create a blank texture.
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, new Float32Array(putFloatingPointArrayHere)); // width and height must be supplied when uploading from a typed array
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.bindTexture(gl.TEXTURE_2D, null);

WebGL create Texture

I successfully created a WebGL texture from an image and drew it into a canvas element.
function initTexture(src) {
texture = gl.createTexture();
texture.image = new Image();
texture.image.onload = function() {
handleLoadedTexture(texture)
}
texture.image.src = src;
}
I also tried to create a texture from one of these datatypes, but without success.
[object ImageData]
[object CanvasPixelArray]
[object CanvasRenderingContext2D]
Is it possible to create a texture just with an image's pixel array?
Or in other words: Is it possible to create a JS Image object out of a pixel array?
Edit:
The pixel array looks like this [r,g,b,a,r,g,b,a,r,g,b,a,...] and each value is in a range of {0..255}.
I want to create a texture with the pixels in the given array.
It's absolutely possible to create a texture with a pixel array! I use the following in my code all the time to create a single pixel, solid color texture.
function createSolidTexture(gl, r, g, b, a) {
var data = new Uint8Array([r, g, b, a]);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
return texture;
}
EDIT: To extrapolate this a little further, most of what you need to know is in the gl.texImage2D call. In order to create a texture from raw RGB(A) data you need an array of unsigned byte values, you need to specify to WebGL what the data represents (RGB or RGBA), and you need to know the dimensions of the texture. A more generalized function would look like this:
function textureFromPixelArray(gl, dataArray, type, width, height) {
var dataTypedArray = new Uint8Array(dataArray); // Don't need to do this if the data is already in a typed array
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, type, width, height, 0, type, gl.UNSIGNED_BYTE, dataTypedArray);
// Other texture setup here, like filter modes and mipmap generation
return texture;
}
// RGB Texture:
// For a 16x16 texture the array must have at least 768 values in it (16x16x3)
var rgbTex = textureFromPixelArray(gl, [r,g,b,r,g,b...], gl.RGB, 16, 16);
// RGBA Texture:
// For a 16x16 texture the array must have at least 1024 values in it (16x16x4)
var rgbaTex = textureFromPixelArray(gl, [r,g,b,a,r,g,b,a...], gl.RGBA, 16, 16);
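On the second part of the question: a texture does not need an Image object as its source. If you do want an image-like source built from a pixel array, one route is to go through a 2D canvas and ImageData; a sketch (the width and height parameters are values you have to supply yourself):
function canvasFromPixelArray(pixels, width, height) {
  var canvas = document.createElement('canvas');
  canvas.width = width;
  canvas.height = height;
  var ctx = canvas.getContext('2d');
  var imageData = ctx.createImageData(width, height);
  imageData.data.set(pixels); // pixels is [r,g,b,a, r,g,b,a, ...] with values 0..255
  ctx.putImageData(imageData, 0, 0);
  return canvas;
}
// A canvas (or the ImageData itself) can then be uploaded with the DOM-source overload:
// gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);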
