WebGL texImage2D parameters - javascript

I am trying to load an image file into my WebGL code and create a texture out of it, but while loading the image I get the error:
Uncaught TypeError: Failed to execute 'texImage2D' on 'WebGLRenderingContext':
parameter 9 is not of type 'ArrayBufferView'.
at Image. (app.js:13)
var image = new Image();
image.src = "./js-logo.png";
image.addEventListener("load", function(){
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 10, 10, 0, gl.RGBA, gl.UNSIGNED_BYTE, image);
gl.generateMipmap(gl.TEXTURE_2D);
});

See WebGL Specification - 5.14 The WebGL context:
[...]
void texImage2D(GLenum target, GLint level, GLint internalformat,
GLsizei width, GLsizei height, GLint border, GLenum format,
GLenum type, [AllowShared] ArrayBufferView? pixels);
void texImage2D(GLenum target, GLint level, GLint internalformat,
GLenum format, GLenum type, TexImageSource source); // May throw DOMException
Since image is a TexImageSource, the call has to change from
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 10, 10, 0, gl.RGBA, gl.UNSIGNED_BYTE, image);
to
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
Note, the size of the bitmap is encoded in the image object.
Further, you should set the texture filtering parameters:
var image = new Image();
image.src = "./js-logo.png";
image.addEventListener("load", function(){
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR );
gl.generateMipmap(gl.TEXTURE_2D);
});
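One caveat (not part of the original answer, just a sketch): in WebGL 1, gl.generateMipmap and mipmap filtering only work for power-of-two textures. If the image has non-power-of-two dimensions, the usual fallback is to clamp and avoid mipmaps:
var image = new Image();
image.src = "./js-logo.png";
image.addEventListener("load", function(){
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
    // Non-power-of-two textures in WebGL 1: no mipmaps, clamp to edge
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
});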

Related

how do I readPixels() an R32F texture in webgl2?

I am using WebGL2 to render 32-bit floating point values into a 2D texture with only one channel (red). After the draw call, I want to read those values in JavaScript.
The following gives GL_INVALID_OPERATION: Invalid format and type combination because of that last gl.readPixels line.
const gl = document.createElement("canvas").getContext("webgl2");
if(!gl.getExtension("EXT_color_buffer_float")) {
throw "float";
}
// ... compile program ...
gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer());
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1,
1, -1,
1, 1,
-1, -1,
-1, 1,
1, 1,
]), gl.STATIC_DRAW);
const texture = gl.createTexture();
const framebuffer = gl.createFramebuffer();
gl.viewport(0, 0, width, height);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.R32F, width, height, 0, gl.RED, gl.FLOAT, null);
gl.useProgram(program);
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
gl.drawArrays(gl.TRIANGLES, 0, 6);
const pixels = new Float32Array(width * height);
gl.readPixels(0, 0, width, height, gl.RED, gl.FLOAT, pixels);
How do I read from an R32F texture?
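A sketch of a likely fix (based on the WebGL2/ES3 readPixels rules, not code from the question): for floating-point color buffers the only combination that is guaranteed to work is gl.RGBA / gl.FLOAT; gl.RED / gl.FLOAT is only accepted if it happens to match the implementation-defined pair reported by IMPLEMENTATION_COLOR_READ_FORMAT / IMPLEMENTATION_COLOR_READ_TYPE. So read four channels and pick out red:
// Check what the implementation happens to support for this framebuffer:
const readFormat = gl.getParameter(gl.IMPLEMENTATION_COLOR_READ_FORMAT);
const readType = gl.getParameter(gl.IMPLEMENTATION_COLOR_READ_TYPE);
console.log(readFormat === gl.RED, readType === gl.FLOAT);
// The always-supported path for a float color buffer is RGBA/FLOAT:
const rgba = new Float32Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.FLOAT, rgba);
// Every 4th value is the red channel that was rendered:
const reds = rgba.filter((v, i) => i % 4 === 0);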

webgl multiple fragment shaders on a image using frame buffer object gives black output

First off, I'm new at WebGL. I am trying to apply multiple fragment shaders (here, 2 shaders) on a single image to be rendered. I read in different articles and other Stack Overflow questions that we should use framebuffers (ping-pong method) for this purpose but couldn't find any sample code snippets anywhere. What I understood is first I create two programs, each with a different fragment shader. Then use a framebuffer object where I can use my first program (first shader) on the original image and output it to that FBO texture. Then use this output texture as the input in the second program so that both shaders are retained. This output is finally rendered on the canvas.
I tried doing the same thing, but my canvas is completely black. I am not getting any errors in the console; everything seems fine, but there is no result.
I have been stuck on this for hours. Could anyone help me check it?
Below is the code I wrote
const canvas = document.querySelector("canvas")
const gl = canvas.getContext("webgl");
//create two programs using a createprogram function written in my code.
const programA = createProgram(gl, vertexShader, fragmentShaderA); // program using #shader1
const programB = createProgram(gl, vertexShader, fragmentShaderB);
const texFbPair = createTextureAndFramebuffer(gl); //function defined below
setAttributes(programA);
setAttributes(programB);
function setAttributes(program) {
const positionLocation = gl.getAttribLocation(program, 'position');
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1, -1, 1, 1, -1,
1, 1, 1, -1, -1, 1,
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
const texCoordLocation = gl.getAttribLocation(program, "a_texCoord");
const texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 1.0,
0.0, 0.0,
1.0, 1.0,
1.0, 0.0,
1.0, 1.0,
0.0, 0.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
}
const texture = gl.createTexture();
texture.image = new Image();
texture.image.onload = function () {
handleLoadedTexture(gl, texture);
};
texture.image.crossOrigin = '';
texture.image.src = 'skogafoss_waterfall_iceland.jpg';
function handleLoadedTexture(gl, texture, callback) {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.image);
}
gl.useProgram(programA);
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair.fb);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.clearColor(0, 0, 1, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLES, 0, 6);
gl.useProgram(programB);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, texFbPair.tex);
gl.clearColor(0, 0, 0, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLES, 0, 6)
function createTextureAndFramebuffer(gl) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, canvas.width, canvas.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
return { tex: tex, fb: fb };
}
It looks from your code like you have a misunderstanding of how attributes work. Attributes are global state in WebGL1, so these lines
setAttributes(programA);
setAttributes(programB);
won't work. The second call to setAttributes will just change the global attribute state to the second call's settings.
See this and this
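One way around the global-attribute issue in WebGL1 (not used in the working code below, just a sketch) is the OES_vertex_array_object extension, which records attribute state into a vertex array object so each draw can bind its own setup with one call:
const ext = gl.getExtension("OES_vertex_array_object");
if (ext) {
  const vaoA = ext.createVertexArrayOES();
  ext.bindVertexArrayOES(vaoA);
  setAttributes(programA);            // recorded into vaoA
  const vaoB = ext.createVertexArrayOES();
  ext.bindVertexArrayOES(vaoB);
  setAttributes(programB);            // recorded into vaoB
  // at draw time:
  // ext.bindVertexArrayOES(vaoA); gl.useProgram(programA); gl.drawArrays(...);
  // ext.bindVertexArrayOES(vaoB); gl.useProgram(programB); gl.drawArrays(...);
}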
The next issue is that the code does not wait for the image to load: it creates an image, sets a callback for when it finishes loading, and then immediately draws both passes. Later the image finishes loading and is copied to the texture, but no drawing happens after that.
The code also never allocates the actual texture in createTextureAndFramebuffer. To do that you need to call gl.texImage2D.
Here is some working code.
const vertexShader = `
attribute vec4 position;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
void main() {
gl_Position = position;
v_texCoord = a_texCoord;
}
`;
const fragmentShaderA = `
precision highp float;
varying vec2 v_texCoord;
uniform sampler2D tex;
void main() {
gl_FragColor = texture2D(tex, v_texCoord);
}
`;
const fragmentShaderB = `
precision highp float;
varying vec2 v_texCoord;
uniform sampler2D tex;
void main() {
gl_FragColor = texture2D(tex, v_texCoord);
}
`;
const canvas = document.querySelector("canvas")
const gl = canvas.getContext("webgl");
//create two programs using a createprogram function written in my code.
const programA = createProgram(gl, vertexShader, fragmentShaderA); // program using #shader1
const programB = createProgram(gl, vertexShader, fragmentShaderB);
const texFbPair = createTextureAndFramebuffer(gl); //function defined below
function setAttributes(program) {
const positionLocation = gl.getAttribLocation(program, 'position');
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1, -1, 1, 1, -1,
1, 1, 1, -1, -1, 1,
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
const texCoordLocation = gl.getAttribLocation(program, "a_texCoord");
const texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 1.0,
0.0, 0.0,
1.0, 1.0,
1.0, 0.0,
1.0, 1.0,
0.0, 0.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
}
const texture = gl.createTexture();
texture.image = new Image();
texture.image.onload = function () {
handleLoadedTexture(gl, texture);
};
texture.image.crossOrigin = '';
texture.image.src = 'https://i.imgur.com/ZKMnXce.png';
function handleLoadedTexture(gl, texture, callback) {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.image);
setAttributes(programA);
gl.useProgram(programA);
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair.fb);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.clearColor(0, 0, 1, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLES, 0, 6);
setAttributes(programB);
gl.useProgram(programB);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, texFbPair.tex);
gl.clearColor(0, 0, 0, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
function createTextureAndFramebuffer(gl) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(
gl.TEXTURE_2D,
0, // mip level
gl.RGBA, // internal format
gl.canvas.width, // width
gl.canvas.height, // height
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
null); // data
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
return { tex: tex, fb: fb };
}
function createProgram(gl, vs, fs) {
return twgl.createProgram(gl, [vs, fs]);
}
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
Other things I noticed.
The uniforms for the samplers are never looked up (of course I don't know what your actual shaders look like, I used placeholders). As it is, it works because uniforms default to 0, so the programs will reference the texture bound to texture unit 0, which is the default.
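If you do want to set them explicitly, a sketch (the uniform name tex matches the placeholder shaders above):
const texLocA = gl.getUniformLocation(programA, "tex");
const texLocB = gl.getUniformLocation(programB, "tex");
gl.useProgram(programA);
gl.uniform1i(texLocA, 0);   // sample from texture unit 0
gl.useProgram(programB);
gl.uniform1i(texLocB, 0);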
I also noticed the first draw via the framebuffer sets the viewport to the size of the canvas. That's correct if the size of the attachments in the framebuffer is the size of the canvas (which I made them be when I added the call to texImage2D), but it would probably be more appropriate to record a width and height for that texture so that if you change its size the code won't fail.
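A minimal sketch of that idea, passing an explicit size in and returning it with the pair (a hypothetical variation on the createTextureAndFramebuffer above):
function createTextureAndFramebuffer(gl, width, height) {
  const tex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, tex);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  const fb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
  return { tex: tex, fb: fb, width: width, height: height };
}
// later, when rendering into it:
// gl.viewport(0, 0, texFbPair.width, texFbPair.height);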
Finally, while I moved the calls to setAttributes to the correct places, it's not common to also create and fill out buffers when rendering. It's more common to create buffers at init time and set attributes at render time, but I didn't want to change more code.
You might find these tutorials helpful.

WEBGL-FBO: Why is the DEPTH_COMPONENT texture blank after rendercall

To the readers:
"The solution by gman works for the question, but my own problem was in a different part of the code."
I just wanted to render my scene to a framebuffer with a depth-component texture, and then render this texture to the screen so that I only see the depth component. I want to get into shadow mapping and thought this would be a good exercise.
Stuff I did:
- Set read/drawBuffer(s) to gl.NONE because I don't have a color attachment (probably doesn't even matter).
- When creating the depth texture I tried gl.texImage2D but could not get it to work.
- When I use a color attachment instead of a depth attachment, I get a normal result (the color attachment texture just has the scene); obviously here I also added a depth renderbuffer (DEPTH24_STENCIL8).
//creating the Framebuffer
id = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER,id);
atex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, atex);
gl.texStorage2D(gl.TEXTURE_2D,1,gl.DEPTH_COMPONENT16,width,height);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, atex, 0);
gl.drawBuffers([gl.NONE]);
gl.readBuffer(gl.NONE);
gl.bindFramebuffer(gl.FRAMEBUFFER,null);
//----------------
//In the renderLoop
//renders the scene with the pbrShader to the Framebuffer
//The fragment shader of the PBRShader is empty because I don't have any
//color attachments; the vertex shader just transforms the position and
//passes the uvs[...]
render(scene,pbrShader,framebuffer);
//renders a texture to the screen using a very simple vertex shader and
//a fragment shader that just takes the texture and maps it to the screen
renderScreen(framebuffer.atex);
//Some more Code for clarity:
renderScene(scene,shader,fbo=null){
if(fbo!=null){
fbo.activate();
}
this.gl.fClear();
let batches = new Map();
let modals = scene.getEntitiesByType("modal");
modals.forEach(modal=>{
if(batches.has(modal.mesh)){
batches.get(modal.mesh).push(modal);
}else{
batches.set(modal.mesh,[modal]);
}
})
shader.activate().prepareScene(scene);
batches.forEach((batch,mesh)=>{
shader.prepareVAO(mesh);
batch.forEach(modal=>{
shader.prepareInstance(modal);
shader.drawVAO(mesh);
});
shader.unbindVAO(mesh);
});
shader.deactivate();
if(fbo!=null){
fbo.deactivate();
}
}
Here's a working example of using depth textures in WebGL2:
const vs = `#version 300 es
layout(location = 0) in vec4 a_position;
out vec2 v_texcoord;
void main() {
gl_Position = a_position;
v_texcoord = a_position.xy * 0.5 + 0.5;
}
`;
const fs = `#version 300 es
precision mediump float;
in vec2 v_texcoord;
uniform sampler2D u_sampler;
out vec4 outColor;
void main() {
vec4 color = texture(u_sampler, v_texcoord);
outColor = vec4(color.r, 0, 0, 1);
}
`;
function main() {
var gl = document.querySelector("canvas").getContext('webgl2');
if (!gl) {
return alert("no WebGL2");
}
var program = twgl.createProgram(gl, [vs, fs]);
gl.useProgram(program);
var verts = [
1, 1, 1,
-1, 1, 0,
-1, -1, -1,
1, 1, 1,
-1, -1, -1,
1, -1, 0,
];
var vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 3, gl.FLOAT, false, 0, 0);
// create a depth texture.
var depthTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, depthTex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
const mipLevel = 0;
const depthTexWidth = 16;
const depthTexHeight = 16;
gl.texImage2D(
gl.TEXTURE_2D, mipLevel, gl.DEPTH_COMPONENT24,
depthTexWidth, depthTexHeight, 0,
gl.DEPTH_COMPONENT, gl.UNSIGNED_INT, null);
// Create a framebuffer and attach the textures.
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT,
gl.TEXTURE_2D, depthTex, 0);
// use some other texture to render with while we render to the depth texture.
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGBA8,
1, // width
1, // height
0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 255, 255]));
// Turn on depth testing so we can write to the depth texture.
gl.enable(gl.DEPTH_TEST);
// note: we don't need to set u_sampler because uniforms
// default to 0 so it will use texture unit 0 which
// is the also the default active texture unit
// Render to the depth texture
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, depthTexWidth, depthTexHeight);
gl.clear(gl.DEPTH_BUFFER_BIT);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Now draw with the texture to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.bindTexture(gl.TEXTURE_2D, depthTex);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>

Webgl - adding DEPTH_STENCIL renderbuffer prevents rendering to cubemap framebuffer

We're working in WebGL 1 and attempting to render to a cubemap using a stencil. Rendering to the cubemap on its own works fine. When we add a DEPTH_STENCIL renderbuffer it stops writing to the cubemap and issues no error.
This doesn't happen with a normal TEXTURE_2D instead of a TEXTURE_CUBE_MAP.
Depth/stencil/scissor tests are disabled.
The call to framebufferRenderbuffer is what breaks it.
Switching the renderbuffer to be either just a stencil or just a depth has the same effect.
Switching the renderbuffer to be a colour buffer makes it work again.
Here's a minimal-ish recreation. As you can see, we're getting a console output with the correct values for the first three calls and zeroes for the last call.
Why is this happening and what little thing are we missing to make renderbuffers work with cubemaps?
const canvas = document.createElement("canvas");
const gl = canvas.getContext("webgl");
console.log(TEST(false, false));
console.log(TEST(false, true));
console.log(TEST(true, false));
console.log(TEST(true, true));
function TEST(useCubemap, useBuffer) {
const size = 512;
const textureType = useCubemap ? gl.TEXTURE_CUBE_MAP : gl.TEXTURE_2D;
// SETUP THE PROGRAM
{
const program = gl.createProgram();
const vertShader = gl.createShader(gl.VERTEX_SHADER);
const fragShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(vertShader, `
attribute vec2 a_position;
void main() {
gl_Position = vec4(a_position, 0.2, 1.0);
}
`);
gl.compileShader(vertShader);
gl.attachShader(program, vertShader);
gl.shaderSource(fragShader, `
void main() {
gl_FragColor = vec4(0.1, 0.2, 0.3, 0.4);
}
`);
gl.compileShader(fragShader);
gl.attachShader(program, fragShader);
gl.linkProgram(program);
gl.useProgram(program);
}
// SETUP THE QUAD
{
const posBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, posBuffer);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, +1, -1, -1, +1, +1, +1, -1]), gl.STATIC_DRAW);
}
// SETUP THE FRAMEBUFFER
{
const fb = gl.createFramebuffer();
const targetTexture = gl.createTexture();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.bindTexture(textureType, targetTexture);
gl.texParameteri(textureType, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(textureType, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(textureType, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(textureType, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// SWITCH TEXTURE TYPE
if (textureType === gl.TEXTURE_2D) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
} else {
for (let i = 0; i < 6; i++) gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_CUBE_MAP_POSITIVE_X, targetTexture, 0);
}
}
// SETUP THE RENDER BUFFER
{
const rb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_STENCIL, size, size);
// TAKING THIS OUT MAKES IT WORK
if (useBuffer) gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_STENCIL_ATTACHMENT, gl.RENDERBUFFER, rb);
}
// DISABLE THE OBVIOUS CULPRITS
gl.disable(gl.DEPTH_TEST);
gl.disable(gl.STENCIL_TEST);
gl.disable(gl.SCISSOR_TEST);
// DO A RENDER
gl.viewport(0, 0, size, size);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
// READ BACK THE OUTPUT
const pixels = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
return pixels;
}
It works for me. I get the same values for all 4 calls in the code you posted. What OS/GPU/Driver are you using? Can you pastebin your about:gpu contents if you're using Chrome?
It sounds like a bug in your drivers. It's possible it's also a bug in the WebGL spec.
The OpenGL ES spec does not require any combinations of framebuffer attachments to work (zero, zilch, nada). The WebGL spec requires 3 combinations to work. From the spec, section 6.8:
The following combinations of framebuffer object attachments, when all of the attachments are framebuffer attachment complete, non-zero, and have the same width and height, must result in the framebuffer being framebuffer complete:
COLOR_ATTACHMENT0 = RGBA/UNSIGNED_BYTE texture
COLOR_ATTACHMENT0 = RGBA/UNSIGNED_BYTE texture + DEPTH_ATTACHMENT = DEPTH_COMPONENT16 renderbuffer
COLOR_ATTACHMENT0 = RGBA/UNSIGNED_BYTE texture + DEPTH_STENCIL_ATTACHMENT = DEPTH_STENCIL renderbuffer
But looking at the WebGL conformance tests, only TEXTURE_2D is tested.
So, first off that suggests your driver/gpu doesn't support that combination with cubemaps. Test by calling gl.checkFramebufferStatus. If it doesn't return gl.FRAMEBUFFER_COMPLETE your setup doesn't support rendering to a cubemap with a depth stencil attachment.
const canvas = document.createElement("canvas");
const gl = canvas.getContext("webgl");
TEST("TEXTURE_2D", "DEPTH_COMPONENT16");
TEST("TEXTURE_2D", "DEPTH_STENCIL");
TEST("TEXTURE_CUBE_MAP", "DEPTH_COMPONENT16");
TEST("TEXTURE_CUBE_MAP", "DEPTH_STENCIL");
function TEST(target, depthBufferFormat) {
const size = 16;
const textureType = gl[target];
// SETUP THE FRAMEBUFFER
{
const fb = gl.createFramebuffer();
const targetTexture = gl.createTexture();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.bindTexture(textureType, targetTexture);
gl.texParameteri(textureType, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(textureType, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(textureType, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(textureType, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// SWITCH TEXTURE TYPE
if (textureType === gl.TEXTURE_2D) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
} else {
for (let i = 0; i < 6; i++) gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_CUBE_MAP_POSITIVE_X, targetTexture, 0);
}
}
// SETUP THE RENDER BUFFER
{
const rb = gl.createRenderbuffer();
const format = gl[depthBufferFormat];
gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
gl.renderbufferStorage(gl.RENDERBUFFER, format, size, size);
// TAKING THIS OUT MAKES IT WORK
const attachmentPoint = depthBufferFormat === "DEPTH_COMPONENT16"
? gl.DEPTH_ATTACHMENT
: gl.DEPTH_STENCIL_ATTACHMENT;
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, attachmentPoint, gl.RENDERBUFFER, rb);
}
const success = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;
console.log(target, depthBufferFormat, success ? "PASS" : "**FAIL**");
}
Do you need the stencil or just the depth buffer? Does this sample run for you? It's using a DEPTH_COMPONENT16 attachment.
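If only the depth buffer is needed, the change to the minimal recreation above is small (a sketch; whether the cubemap + depth combination is framebuffer complete still depends on the driver, so keep the checkFramebufferStatus test):
const rb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, size, size);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, rb);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
  console.log("cubemap + depth renderbuffer not supported on this setup");
}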

How to draw image in WebGL using another canvas buffer Data?

I am trying to draw an image to a WebGL canvas from a 2D canvas.
If I use:
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
it works and renders the image successfully, but if I use:
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, c.width, c.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, dataTypedArray);
it just shows a black screen.
Here's my Code :
Vertex Shader
attribute vec2 a_position;
uniform vec2 u_resolution;
uniform mat3 u_matrix;
varying vec2 v_texCoord;
void main() {
gl_Position = vec4(u_matrix * vec3(a_position, 1), 1);
v_texCoord = a_position;
}
Fragment Shader
precision mediump float;
// our texture
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
}
Javascript
window.onload = main;
var buffer = null;
function main() {
var image = new Image();
image.src = "images/GL.jpg"
image.onload = function() {
render(image);
}
}
function render(image) {
var c = document.getElementById("c");
c.width = window.innerWidth*0.90;
c.height = window.innerHeight*0.90;
var context = c.getContext('2d');
context.drawImage(image, 0, 0);
var imageData = context.getImageData(0,0,image.width,image.height);
buffer = imageData.data.buffer; // ArrayBuffer
var canvas = document.getElementById("canvas");
canvas.width = window.innerWidth*0.90;
canvas.height = window.innerHeight*0.90;
//Get A WebGL context
var gl = getWebGLContext(canvas);
if (!gl) {
return;
}
// setup GLSL program
var program = createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
// look up uniform locations
var u_imageLoc = gl.getUniformLocation(program, "u_image");
var u_matrixLoc = gl.getUniformLocation(program, "u_matrix");
// provide texture coordinates for the rectangle.
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
var dataTypedArray = new Uint8Array(buffer);
//gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
textureFromPixelArray(gl, buffer, gl.RGBA, canvas.width, canvas.height);
var dstX = 20;
var dstY = 30;
var dstWidth = canvas.width;
var dstHeight = canvas.height;
// convert dst pixel coords to clipspace coords
var clipX = dstX / gl.canvas.width * 2 - 1;
var clipY = dstY / gl.canvas.height * -2 + 1;
var clipWidth = dstWidth / gl.canvas.width * 2;
var clipHeight = dstHeight / gl.canvas.height * -2;
// build a matrix that will stretch our
// unit quad to our desired size and location
gl.uniformMatrix3fv(u_matrixLoc, false, [
clipWidth, 0, 0,
0, clipHeight, 0,
clipX, clipY, 1,
]);
// Draw the rectangle.
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
function textureFromPixelArray(gl, dataArray, type, width, height) {
var dataTypedArray = new Uint8Array(dataArray); // Don't need to do this if the data is already in a typed array
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, type, width, height, 0, type, gl.UNSIGNED_BYTE, dataTypedArray);
// Other texture setup here, like filter modes and mipmap generation
console.log(dataTypedArray);
return texture;
}
So first off, you can pass a canvas directly to gl.texImage2D. There's no good reason to first call ctx.getImageData and get the data out. Just call
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, someCanvas);
Second, looking at your code you first create a texture, then set filtering. You then call textureFromPixelArray, WHICH CREATES A NEW TEXTURE; that texture does not have filtering set, so if it's not a power-of-2 it won't render. Just a guess, but did you check the JavaScript console? I'm guessing it probably printed a warning about your texture not being renderable.
On top of that, even though textureFromPixelArray creates a new texture the code ignores the return value.
To make the code work as is I think you want to change it to this
// not needed -- var texture = gl.createTexture();
// not needed -- gl.bindTexture(gl.TEXTURE_2D, texture);
// moved from below
// Upload the image into the texture.
var dataTypedArray = new Uint8Array(buffer);
//gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
var texture = textureFromPixelArray(gl, buffer, gl.RGBA, canvas.width, canvas.height);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
