First off, I'm new to WebGL. I am trying to apply multiple fragment shaders (here, 2 shaders) to a single image before rendering it. I read in various articles and other Stack Overflow questions that we should use framebuffers (the ping-pong method) for this, but couldn't find any sample code snippets anywhere. What I understood is: first I create two programs, each with a different fragment shader. Then I use a framebuffer object, run my first program (first shader) on the original image, and output the result to that FBO's texture. Then I use this output texture as the input to the second program, so the effects of both shaders are retained. This output is finally rendered on the canvas.
I tried doing the same thing, but my canvas is completely black. I am not getting any errors in the console; everything seems fine, but there is no result.
I have been stuck on this for hours. Could anyone help me check it?
Below is the code I wrote
const canvas = document.querySelector("canvas")
const gl = canvas.getContext("webgl");
//create two programs using a createprogram function written in my code.
const programA = createProgram(gl, vertexShader, fragmentShaderA); // program using #shader1
const programB = createProgram(gl, vertexShader, fragmentShaderB);
const texFbPair = createTextureAndFramebuffer(gl); //function defined below
setAttributes(programA);
setAttributes(programB);
function setAttributes(program) {
const positionLocation = gl.getAttribLocation(program, 'position');
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1, -1, 1, 1, -1,
1, 1, 1, -1, -1, 1,
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
const texCoordLocation = gl.getAttribLocation(program, "a_texCoord");
const texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 1.0,
0.0, 0.0,
1.0, 1.0,
1.0, 0.0,
1.0, 1.0,
0.0, 0.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
}
const texture = gl.createTexture();
texture.image = new Image();
texture.image.onload = function () {
handleLoadedTexture(gl, texture);
};
texture.image.crossOrigin = '';
texture.image.src = 'skogafoss_waterfall_iceland.jpg';
function handleLoadedTexture(gl, texture, callback) {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.image);
}
gl.useProgram(programA);
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair.fb);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.clearColor(0, 0, 1, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLES, 0, 6);
gl.useProgram(programB);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, texFbPair.tex);
gl.clearColor(0, 0, 0, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLES, 0, 6)
function createTextureAndFramebuffer(gl) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, canvas.width, canvas.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
return { tex: tex, fb: fb };
}
It looks from your code like you have a misunderstanding of how attributes work. Attributes are global state in WebGL1, so these lines
setAttributes(programA);
setAttributes(programB);
won't work. The second call to setAttributes will just change the global attribute state to the second call's settings.
See this and this
The next issue is that the code does not wait for the image to load: it creates an image, sets a callback for when it finishes loading, and then immediately draws 2 things. Later, the image finishes loading and is copied to the texture, but no drawing happens after that.
The code also never allocates the actual texture in createTextureAndFramebuffer. To do that you need to call gl.texImage2D.
Here is some working code.
const vertexShader = `
attribute vec4 position;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
void main() {
gl_Position = position;
v_texCoord = a_texCoord;
}
`;
const fragmentShaderA = `
precision highp float;
varying vec2 v_texCoord;
uniform sampler2D tex;
void main() {
gl_FragColor = texture2D(tex, v_texCoord);
}
`;
const fragmentShaderB = `
precision highp float;
varying vec2 v_texCoord;
uniform sampler2D tex;
void main() {
gl_FragColor = texture2D(tex, v_texCoord);
}
`;
const canvas = document.querySelector("canvas")
const gl = canvas.getContext("webgl");
//create two programs using a createprogram function written in my code.
const programA = createProgram(gl, vertexShader, fragmentShaderA); // program using #shader1
const programB = createProgram(gl, vertexShader, fragmentShaderB);
const texFbPair = createTextureAndFramebuffer(gl); //function defined below
function setAttributes(program) {
const positionLocation = gl.getAttribLocation(program, 'position');
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1, -1, 1, 1, -1,
1, 1, 1, -1, -1, 1,
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
const texCoordLocation = gl.getAttribLocation(program, "a_texCoord");
const texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 1.0,
0.0, 0.0,
1.0, 1.0,
1.0, 0.0,
1.0, 1.0,
0.0, 0.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
}
const texture = gl.createTexture();
texture.image = new Image();
texture.image.onload = function () {
handleLoadedTexture(gl, texture);
};
texture.image.crossOrigin = '';
texture.image.src = 'https://i.imgur.com/ZKMnXce.png';
function handleLoadedTexture(gl, texture, callback) {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.image);
setAttributes(programA);
gl.useProgram(programA);
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair.fb);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.clearColor(0, 0, 1, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLES, 0, 6);
setAttributes(programB);
gl.useProgram(programB);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, texFbPair.tex);
gl.clearColor(0, 0, 0, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
function createTextureAndFramebuffer(gl) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(
gl.TEXTURE_2D,
0, // mip level
gl.RGBA, // internal format
gl.canvas.width, // width
gl.canvas.height, // height
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
null); // data
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
return { tex: tex, fb: fb };
}
function createProgram(gl, vs, fs) {
return twgl.createProgram(gl, [vs, fs]);
}
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
Other things I noticed.
The uniforms for the samplers are never looked up (of course I don't know what your actual shaders look like; I used placeholders). It still works because sampler uniforms default to 0, so the programs will reference the texture bound to texture unit 0, which is the default active unit.
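For completeness, looking a sampler up and assigning a texture unit explicitly would look roughly like this (a sketch using the placeholder uniform name tex from the shaders above):
const texLocA = gl.getUniformLocation(programA, 'tex');
gl.useProgram(programA);
gl.activeTexture(gl.TEXTURE0);              // work with texture unit 0
gl.bindTexture(gl.TEXTURE_2D, texture);     // the texture to sample from
gl.uniform1i(texLocA, 0);                   // tell the sampler to use unit 0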
I also noticed the first draw via the framebuffer sets the viewport to the size of the canvas. That's correct as long as the framebuffer's attachments are the same size as the canvas (which I made them be when I added the call to texImage2D), but it would probably be more appropriate to record a width and height for that texture so the code won't fail if you later change its size.
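For example, createTextureAndFramebuffer could return the attachment size along with the texture and framebuffer, and the framebuffer pass would use that size for its viewport (just a sketch, not required for the code above):
// inside createTextureAndFramebuffer
return { tex: tex, fb: fb, width: gl.canvas.width, height: gl.canvas.height };
// when rendering to it
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair.fb);
gl.viewport(0, 0, texFbPair.width, texFbPair.height);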
Finally, while I moved the calls to setAttributes to the correct places, it's not common to create and fill buffers while rendering. It's more common to create buffers at init time and only set up attributes at render time, but I didn't want to change more code.
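In other words, roughly this structure (just a sketch; positions here stands for the same Float32Array of quad positions used above, and positionLocation is the attribute location looked up per program):
// at init time: create and fill the buffer once
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
// at render time, per program: just point the attribute at the existing buffer
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);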
You might find these tutorials helpful.
Is it possible to sample the default color buffer like a texture?
I would like to do something like this:
Render a cube
Render a fullscreen quad that blurs the cube
The reason that I don't want to render to a framebuffer is that WebGL has multisampling when using the default color buffer but not for framebuffers. A good solution would be to render to the default buffer and then copy it to a framebuffer or texture, but I'm not sure if/how that is possible.
You can't directly sample from the default color buffer. However, the default framebuffer it's attached to is exactly that, a framebuffer, and you can call copyTexImage2D while it is bound to have its contents copied into the currently bound texture.
// initializing the texture
const targetTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// in your render loop
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
gl.copyTexImage2D(
gl.TEXTURE_2D, //target
0, // mip level, has to be zero for default framebuffer
gl.RGBA, // pixel format, default framebuffer is RGBA if the context was created with alpha false you need to use RGB
0, 0, // x,y
gl.drawingBufferWidth, // width
gl.drawingBufferHeight, // height
0 // border, always 0
);
// WebGL - Copy Default Framebuffer
// from https://stackoverflow.com/questions/55228453/sample-default-webgl-color-buffer/
// based on https://webglfundamentals.org/webgl/webgl-render-to-texture.html
"use strict";
function main() {
// Get A WebGL context
/** @type {HTMLCanvasElement} */
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
if (!gl) {
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromScripts(gl, ["3d-vertex-shader", "3d-fragment-shader"]);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var texcoordLocation = gl.getAttribLocation(program, "a_texcoord");
// lookup uniforms
var matrixLocation = gl.getUniformLocation(program, "u_matrix");
var textureLocation = gl.getUniformLocation(program, "u_texture");
// Create a buffer for positions
var positionBuffer = gl.createBuffer();
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Put the positions in the buffer
setGeometry(gl);
// provide texture coordinates for the rectangle.
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Set Texcoords.
setTexcoords(gl);
// Create a texture.
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
{
// fill texture with 3x2 pixels
const level = 0;
const internalFormat = gl.LUMINANCE;
const width = 3;
const height = 2;
const border = 0;
const format = gl.LUMINANCE;
const type = gl.UNSIGNED_BYTE;
const data = new Uint8Array([
128, 64, 128,
0, 192, 0,
]);
const alignment = 1;
gl.pixelStorei(gl.UNPACK_ALIGNMENT, alignment);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border,
format, type, data);
// set the filtering so we don't need mips and it's not filtered
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
// Create a texture to render to
const targetTextureWidth = gl.drawingBufferWidth;
const targetTextureHeight = gl.drawingBufferHeight;
const targetTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
{
// define size and format of level 0
const level = 0;
const internalFormat = gl.RGBA;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
targetTextureWidth, targetTextureHeight, border,
format, type, data);
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
function degToRad(d) {
return d * Math.PI / 180;
}
var fieldOfViewRadians = degToRad(60);
var modelXRotationRadians = degToRad(0);
var modelYRotationRadians = degToRad(0);
// Get the starting time.
var then = 0;
requestAnimationFrame(drawScene);
function drawCube(aspect) {
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Turn on the position attribute
gl.enableVertexAttribArray(positionLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 3; // 3 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionLocation, size, type, normalize, stride, offset);
// Turn on the texcoord attribute
gl.enableVertexAttribArray(texcoordLocation);
// Bind the texcoord buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Tell the texcoord attribute how to get data out of texcoordBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
texcoordLocation, size, type, normalize, stride, offset);
// Compute the projection matrix
var projectionMatrix =
m4.perspective(fieldOfViewRadians, aspect, 1, 2000);
var cameraPosition = [0, 0, 2];
var up = [0, 1, 0];
var target = [0, 0, 0];
// Compute the camera's matrix using look at.
var cameraMatrix = m4.lookAt(cameraPosition, target, up);
// Make a view matrix from the camera matrix.
var viewMatrix = m4.inverse(cameraMatrix);
var viewProjectionMatrix = m4.multiply(projectionMatrix, viewMatrix);
var matrix = m4.xRotate(viewProjectionMatrix, modelXRotationRadians);
matrix = m4.yRotate(matrix, modelYRotationRadians);
// Set the matrix.
gl.uniformMatrix4fv(matrixLocation, false, matrix);
// Tell the shader to use texture unit 0 for u_texture
gl.uniform1i(textureLocation, 0);
// Draw the geometry.
gl.drawArrays(gl.TRIANGLES, 0, 6 * 6);
}
// Draw the scene.
function drawScene(time) {
// convert to seconds
time *= 0.001;
// Subtract the previous time from the current time
var deltaTime = time - then;
// Remember the current time for the next frame.
then = time;
// Animate the rotation
modelYRotationRadians += -0.7 * deltaTime;
modelXRotationRadians += -0.4 * deltaTime;
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
{
// render cube with our 3x2 texture
gl.bindTexture(gl.TEXTURE_2D, texture);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Clear the canvas AND the depth buffer.
gl.clearColor(0, 0, 1, 1); // clear to blue
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const aspect = targetTextureWidth / targetTextureHeight;
drawCube(aspect);
}
{
// copy what was just rendered into targetTexture, then render the cube with it
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
gl.copyTexImage2D(
gl.TEXTURE_2D, //target
0, // mip level, has to be zero for default framebuffer
gl.RGBA, // pixel format, default framebuffer is RGBA
0, 0, // x,y
gl.drawingBufferWidth, // width
gl.drawingBufferHeight, // height
0 // border, always 0
);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Clear the canvas AND the depth buffer.
gl.clearColor(1, 1, 1, 1); // clear to white
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
drawCube(aspect);
}
requestAnimationFrame(drawScene);
}
}
// Fill the buffer with the values that define a cube.
function setGeometry(gl) {
var positions = new Float32Array(
[
-0.5, -0.5, -0.5,
-0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
-0.5, 0.5, -0.5,
0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, 0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, 0.5,
-0.5, 0.5, -0.5,
-0.5, 0.5, 0.5,
0.5, 0.5, -0.5,
-0.5, 0.5, 0.5,
0.5, 0.5, 0.5,
0.5, 0.5, -0.5,
-0.5, -0.5, -0.5,
0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, -0.5, 0.5,
0.5, -0.5, -0.5,
0.5, -0.5, 0.5,
-0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, 0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
0.5, 0.5, -0.5,
0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, -0.5,
0.5, 0.5, 0.5,
]);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}
// Fill the buffer with texture coordinates for the cube.
function setTexcoords(gl) {
gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array(
[
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
]),
gl.STATIC_DRAW);
}
main();
<canvas id="canvas" style="width:512px;height:512px"></canvas>
<!-- vertex shader -->
<script id="3d-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
attribute vec2 a_texcoord;
uniform mat4 u_matrix;
varying vec2 v_texcoord;
void main() {
// Multiply the position by the matrix.
gl_Position = u_matrix * a_position;
// Pass the texcoord to the fragment shader.
v_texcoord = a_texcoord;
}
</script>
<!-- fragment shader -->
<script id="3d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
// Passed in from the vertex shader.
varying vec2 v_texcoord;
// The texture.
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, v_texcoord);
}
</script>
<!--
for most samples webgl-utils only provides shader compiling/linking and
canvas resizing because why clutter the examples with code that's the same in every sample.
See http://webglfundamentals.org/webgl/lessons/webgl-boilerplate.html
and http://webglfundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
for webgl-utils, m3, m4, and webgl-lessons-ui.
-->
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<script src="https://webglfundamentals.org/webgl/resources/m4.js"></script>
We're working in WebGL 1 and attempting to render to a cubemap using a stencil. Rendering to the cubemap on its own works fine. When we add a DEPTH_STENCIL renderbuffer it stops writing to the cubemap and issues no error.
This doesn't happen with a normal TEXTURE_2D instead of a TEXTURE_CUBE_MAP.
Depth/stencil/scissor tests are disabled.
The call to framebufferRenderbuffer is what breaks it.
Switching the renderbuffer to be either just a stencil or just a depth has the same effect.
Switching the renderbuffer to be a colour buffer makes it work again.
Here's a minimal-ish recreation. As you can see, we're getting a console output with the correct values for the first three calls and zeroes for the last call.
Why is this happening and what little thing are we missing to make renderbuffers work with cubemaps?
const canvas = document.createElement("canvas");
const gl = canvas.getContext("webgl");
console.log(TEST(false, false));
console.log(TEST(false, true));
console.log(TEST(true, false));
console.log(TEST(true, true));
function TEST(useCubemap, useBuffer) {
const size = 512;
const textureType = useCubemap ? gl.TEXTURE_CUBE_MAP : gl.TEXTURE_2D;
// SETUP THE PROGRAM
{
const program = gl.createProgram();
const vertShader = gl.createShader(gl.VERTEX_SHADER);
const fragShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(vertShader, `
attribute vec2 a_position;
void main() {
gl_Position = vec4(a_position, 0.2, 1.0);
}
`);
gl.compileShader(vertShader);
gl.attachShader(program, vertShader);
gl.shaderSource(fragShader, `
void main() {
gl_FragColor = vec4(0.1, 0.2, 0.3, 0.4);
}
`);
gl.compileShader(fragShader);
gl.attachShader(program, fragShader);
gl.linkProgram(program);
gl.useProgram(program);
}
// SETUP THE QUAD
{
const posBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, posBuffer);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, +1, -1, -1, +1, +1, +1, -1]), gl.STATIC_DRAW);
}
// SETUP THE FRAMEBUFFER
{
const fb = gl.createFramebuffer();
const targetTexture = gl.createTexture();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.bindTexture(textureType, targetTexture);
gl.texParameteri(textureType, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(textureType, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(textureType, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(textureType, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// SWITCH TEXTURE TYPE
if (textureType === gl.TEXTURE_2D) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
} else {
for (let i = 0; i < 6; i++) gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_CUBE_MAP_POSITIVE_X, targetTexture, 0);
}
}
// SETUP THE RENDER BUFFER
{
const rb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_STENCIL, size, size);
// TAKING THIS OUT MAKES IT WORK
if (useBuffer) gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_STENCIL_ATTACHMENT, gl.RENDERBUFFER, rb);
}
// DISABLE THE OBVIOUS CULPRITS
gl.disable(gl.DEPTH_TEST);
gl.disable(gl.STENCIL_TEST);
gl.disable(gl.SCISSOR_TEST);
// DO A RENDER
gl.viewport(0, 0, size, size);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
// READ THE OUTPUT
const pixels = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
return pixels;
}
It works for me. I get the same values for all 4 calls in the code you posted. What OS/GPU/Driver are you using? Can you pastebin your about:gpu contents if you're using Chrome?
It sounds like a bug in your drivers. It's possible it's also a bug in the WebGL spec.
The OpenGL ES spec does not require any combinations of framebuffer attachments to work (zero, zilch, nada). The WebGL spec requires 3 combinations to work. From the spec, section 6.8:
The following combinations of framebuffer object attachments, when all of the attachments are framebuffer attachment complete, non-zero, and have the same width and height, must result in the framebuffer being framebuffer complete:
COLOR_ATTACHMENT0 = RGBA/UNSIGNED_BYTE texture
COLOR_ATTACHMENT0 = RGBA/UNSIGNED_BYTE texture + DEPTH_ATTACHMENT = DEPTH_COMPONENT16 renderbuffer
COLOR_ATTACHMENT0 = RGBA/UNSIGNED_BYTE texture + DEPTH_STENCIL_ATTACHMENT = DEPTH_STENCIL renderbuffer
But looking at the WebGL conformance tests, only TEXTURE_2D is tested.
So, first off, that suggests your driver/GPU doesn't support that combination with cubemaps. Test by calling gl.checkFramebufferStatus. If it doesn't return gl.FRAMEBUFFER_COMPLETE, your setup doesn't support rendering to a cubemap with a depth-stencil attachment.
const canvas = document.createElement("canvas");
const gl = canvas.getContext("webgl");
TEST("TEXTURE_2D", "DEPTH_COMPONENT16");
TEST("TEXTURE_2D", "DEPTH_STENCIL");
TEST("TEXTURE_CUBE_MAP", "DEPTH_COMPONENT16");
TEST("TEXTURE_CUBE_MAP", "DEPTH_STENCIL");
function TEST(target, depthBufferFormat) {
const size = 16;
const textureType = gl[target];
// SETUP THE FRAMEBUFFER
{
const fb = gl.createFramebuffer();
const targetTexture = gl.createTexture();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.bindTexture(textureType, targetTexture);
gl.texParameteri(textureType, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(textureType, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(textureType, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(textureType, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// SWITCH TEXTURE TYPE
if (textureType === gl.TEXTURE_2D) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
} else {
for (let i = 0; i < 6; i++) gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_CUBE_MAP_POSITIVE_X, targetTexture, 0);
}
}
// SETUP THE RENDER BUFFER
{
const rb = gl.createRenderbuffer();
const format = gl[depthBufferFormat];
gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
gl.renderbufferStorage(gl.RENDERBUFFER, format, size, size);
// CHOOSE THE ATTACHMENT POINT THAT MATCHES THE RENDERBUFFER FORMAT
const attachmentPoint = depthBufferFormat === "DEPTH_COMPONENT16"
? gl.DEPTH_ATTACHMENT
: gl.DEPTH_STENCIL_ATTACHMENT;
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, attachmentPoint, gl.RENDERBUFFER, rb);
}
const success = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;
console.log(target, depthBufferFormat, success ? "PASS" : "**FAIL**");
}
Do you need the stencil or just the depth buffer? Does this sample run for you? It's using a DEPTH_COMPONENT16 attachment.
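If the stencil isn't actually needed, one workaround (a sketch based on the test code above) is to attach a depth-only renderbuffer, which is one of the combinations the WebGL spec requires to be framebuffer complete:
// depth-only renderbuffer instead of DEPTH_STENCIL
const rb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, size, size);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, rb);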
I want to draw a line in a WebGL context but it isn't working. I can clear the canvas color, so I know the program is compiled, but I'm not sure where the error is.
The main part of the code is
let canvas = document.getElementById("gl-canvas");
let gl = WebGLUtils.setupWebGL(canvas);
if (!gl) {
alert("WebGL isn't available");
}
let program = initShaders(gl, "vertex-shader", "fragment-shader");
gl.useProgram(program);
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0, 0, 0, 0.5);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
let color = gl.getAttribLocation(program, "color");
let colorBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1,0,0,1, 1,0,0,1]), gl.STATIC_DRAW);
gl.vertexAttribPointer(color, 4, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(color);
let vPosition = gl.getAttribLocation(program, "vPosition");
let positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1,1,1,1, -1,-1,-1,1]), gl.STATIC_DRAW);
gl.vertexAttribPointer(vPosition, 4, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vPosition);
gl.drawArrays(gl.LINES, 0, 1);
The full code can be found here
The problem is your drawArrays call:
count: Specifies the number of indices to be rendered.
Every line needs two indices: starting point and end point:
gl.drawArrays(gl.LINES, 0, 2);
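More generally, gl.LINES consumes two vertices per segment, so count must be 2 times the number of segments. A quick sketch with made-up coordinates, keeping the 4-component positions from the question (the color buffer would also need one color per vertex):
// two independent segments, 4 floats (x, y, z, w) per vertex
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
   1,  1, 0, 1,   -1, -1, 0, 1,   // segment 1
  -1,  1, 0, 1,    1, -1, 0, 1,   // segment 2
]), gl.STATIC_DRAW);
gl.vertexAttribPointer(vPosition, 4, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.LINES, 0, 4); // 2 segments, 2 vertices each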
I'm trying to draw an image (as a texture) to a framebuffer, apply a sharpness filter (controlled by dragging a slider in the UI), then read the result from the framebuffer and copy the data to a plain 2D canvas (not WebGL) by calling readPixels with the framebuffer bound, getting the pixel array, and copying it into ImageData.data. But the function returns the original texture.
Maybe someone can explain this to me, because as I understand it, what appears on the screen is actually a framebuffer's content.
Sorry for the large amount of code, but I hope it helps show what I'm doing.
(function () {
var anotherContext = null;
var canvas = $("#canvas");
main();
setupCanvas();
function setupCanvas () {
anotherContext = document.getElementById("anothercanvas").getContext("2d");
}
function main () {
var image = new Image();
image.src = "http://localhost:9292/img/ava.jpg";
image.onload = function () {
render(image);
}
}
function render (image) {
//-----get contexts----
var canvas = document.getElementById('canvas');
var gl = canvas.getContext('experimental-webgl');
//----define shaders-----
var vs = document.getElementById('vshader').textContent;
var fs = document.getElementById('fshader').textContent;
//----create program-----
var program = createProgram(vs, fs);
gl.useProgram(program);
//----setup vertex data-----
var positionLocation = gl.getAttribLocation(program, "a_position");
var texCoordLocation = gl.getAttribLocation(program, "a_texCoord");
//----setup texture-----
var texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
// Create a texture.
var texture = createAndSetupTexture();
// Upload the image into the texture.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
//---framebuffer----
var framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
var canRead = (gl.checkFramebufferStatus(gl.FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE);
console.log("Can read: ", canRead);
//----lookup uniforms and set the resolution-----
var resolutionLocation = gl.getUniformLocation(program, "u_resolution");
var textureSizeLocation = gl.getUniformLocation(program, "u_textureSize");
var kernelLocation = gl.getUniformLocation(program, "u_kernel[0]");
gl.uniform2f(textureSizeLocation, image.width, image.height);
//----kernels-----
var kernel = [
0, 0, 0,
0, 1, 0,
0, 0, 0
];
var sharpnessKernel = [
0,-1, 0,
-1, 5, -1,
0,-1, 0
];
//-----bind buffer------
var vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(program.vertexPosAttrib, 2, gl.FLOAT, false, 0, 0);
setRectangle(gl, 0, 0, image.width, image.height);
draw(kernel);
function draw (krn) {
// gl.bindTexture(gl.TEXTURE_2D, texture);
setFramebuffer(framebuffer);
drawWithKernel(krn);
copyImage();
// gl.bindTexture(gl.TEXTURE_2D, texture);
setFramebuffer(null);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
function setFramebuffer (fbuf) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbuf);
gl.uniform2f(resolutionLocation, canvas.width, canvas.height);
gl.viewport(0, 0, canvas.width, canvas.height);
}
function drawWithKernel (kernel) {
gl.uniform1fv(kernelLocation, kernel);
//---draw
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
function createAndSetupTexture () {
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
return texture;
}
function setRectangle (gl, x, y, width, height) {
var x1 = x;
var x2 = x + width;
var y1 = y;
var y2 = y + height;
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
x1, y1,
x2, y1,
x1, y2,
x1, y2,
x2, y1,
x2, y2]), gl.STATIC_DRAW);
}
function createShader(str, type) {
var shader = gl.createShader(type);
gl.shaderSource(shader, str);
gl.compileShader(shader);
return shader;
}
function createProgram (vstr, fstr) {
var program = gl.createProgram();
var vshader = createShader(vstr, gl.VERTEX_SHADER);
var fshader = createShader(fstr, gl.FRAGMENT_SHADER);
gl.attachShader(program, vshader);
gl.attachShader(program, fshader);
gl.linkProgram(program);
return program;
}
function copyImage () {
var pixels = new Uint8Array(image.width * image.height * 4);
gl.readPixels(0, 0, image.width, image.height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
var imageData = anotherContext.createImageData(image.width, image.height);
for (var i = pixels.length - 1; i >= 0; i--) {
imageData.data[i] = pixels[i];
};
// console.log(imageData.data);
anotherContext.putImageData(imageData, 0, 0);
}
$("#slider").slider({
min: 0,
max: 99,
slide: function (event, ui) {
var currentKernel = null;
//do not use any filtering if slider is on 0 position
if(ui.value == 0) {
currentKernel = kernel;
}
else {
currentKernel = sharpnessKernel.slice(0);
currentKernel[4] -= (ui.value / 100);
}
draw(currentKernel);
}
});
}
})()
Your current function and bracket setup is completely broken, which also suggests that you're not using Firebug or any other web console to debug your JavaScript.
For a start, you should move some functions outside of the main one and get a modern editor, as most of them can highlight matching brackets.
EDIT: This looks similar to the code from WebGL Fundamentals. To answer your question: you only draw to the screen when the bound framebuffer is null (as you can read on that site).
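As a rough sketch of the intended order (using the names from your code, and assuming the texture you sample from is not the same texture attached to the framebuffer, since sampling a texture attached to the currently bound framebuffer is undefined):
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);   // render target is the offscreen framebuffer
drawWithKernel(sharpnessKernel);                    // the filtered result lands in the framebuffer's texture
const pixels = new Uint8Array(image.width * image.height * 4);
gl.readPixels(0, 0, image.width, image.height,      // readPixels reads the currently bound framebuffer
              gl.RGBA, gl.UNSIGNED_BYTE, pixels);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);           // null = the canvas
gl.drawArrays(gl.TRIANGLES, 0, 6);                  // only this draw shows up on screen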
Anyway, you might be helped by this one: Creating texture from getImageData (Javascript) or possibly fabric.js image filters