I'm trying to translate two 3D cubes away from each other. I've tried using clip space as well as a translation matrix, but nothing has worked. The result I'm looking for is the two cubes side by side, preferably along the x-axis.
Here is my code:
var gl,program,canvas;
var vBuffer, vPosition;
var idxBuffer;
var vertices = [
-0.5, 0.5, 1,
0.5, 0.5, 1,
0.5, -0.5, 1,
-0.5, -0.5, 1,
-0.5, 0.5, 0,
0.5, 0.5, 0,
0.5, -0.5, 0,
-0.5, -0.5, 0
];
var dVecIdx = new Uint16Array([
0, 1, 1, 2,
2, 3, 3, 0,
4, 5, 5, 6,
6, 7, 7, 4,
0, 4, 1, 5,
2, 6, 3, 7
]);
var projection = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 1,
0, 0, 0, 1
];
var a = Math.sqrt(0.5);
var rotation = [
a, 0, a, 0,
0, 1, 0, 0,
-a, 0, a, 0,
0, 0, 0, 1
];
window.onload = function init() {
canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) { alert("WebGL isn't available"); }
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(1.0, 1.0, 1.0, 1.0);
gl.enable(gl.DEPTH_TEST);
program = initShaders(gl, "vertex-shader", "fragment-shader");
vBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices), gl.STATIC_DRAW);
vPosition = gl.getAttribLocation(program, "vPosition");
gl.enableVertexAttribArray(vPosition);
gl.vertexAttribPointer(vPosition, 3, gl.FLOAT, false, 0, 0);
idxBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, idxBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, dVecIdx, gl.STATIC_DRAW);
render();
}
function render() {
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.useProgram(program);
gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
projLoc = gl.getUniformLocation(program, "projectionMatrix");
loc = gl.getUniformLocation(program, "rotate");
gl.uniformMatrix4fv(projLoc, false, projection);
gl.uniformMatrix4fv(loc, false, projection);
gl.drawElements(gl.LINES, dVecIdx.length, gl.UNSIGNED_SHORT, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
projLoc = gl.getUniformLocation(program, "projectionMatrix");
loc = gl.getUniformLocation(program, "rotate");
gl.uniformMatrix4fv(projLoc, false, projection);
gl.uniformMatrix4fv(loc, false, rotation);
gl.drawElements(gl.LINES, dVecIdx.length, gl.UNSIGNED_SHORT, 0);
requestAnimFrame(render);
}
<html>
<head>
<script src="https://www.cs.unm.edu/~angel/WebGL/7E/Common/initShaders.js"></script>
<script src="https://www.cs.unm.edu/~angel/WebGL/7E/Common/MV.js"></script>
<script src="https://www.cs.unm.edu/~angel/WebGL/7E/Common/webgl-utils.js"></script>
<script id="vertex-shader" type="x-shader/x-vertex">
attribute vec3 vPosition;
attribute vec4 vColor;
varying vec4 fColor;
uniform mat4 projectionMatrix;
uniform mat4 rotate;
void main() {
gl_Position = projectionMatrix * rotate * vec4(vPosition, 1);
fColor = vColor;
}
</script>
<script id="fragment-shader" type="x-shader/x-fragment">
precision mediump float;
varying vec4 fColor;
void main() {
gl_FragColor = fColor;
}
</script>
</head>
<body>
<div style="border: 1px dotted black;">
<div style="text-align:center">
<canvas id="gl-canvas" width="500" height="500"></canvas>
</div>
</div>
</body>
</html>
Any help is appreciated!
Answer:
If you plan on getting much more advanced with your setup, I would recommend either using a WebGL library such as Three.js to abstract away some of the math, or really taking the time to google around and understand object and camera transformation matrices.
Having said that, the simple answer is to just add another matrix for translations and insert it between your projection and rotation matrices in the shader:
attribute vec3 vPosition;
attribute vec4 vColor;
varying vec4 fColor;
uniform mat4 projectionMatrix;
uniform mat4 rotate;
uniform mat4 translate;
void main() {
gl_Position = projectionMatrix * translate * rotate * vec4(vPosition, 1);
fColor = vColor;
}
The translation matrix will look like:
var translation = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
x, y, z, 1
];
where x, y, and z are the translation distances along the X-axis, Y-axis, and Z-axis respectively.
This would then be added to the render method in the same way as the rotation matrix:
transLoc = gl.getUniformLocation(program, "translate");
gl.uniformMatrix4fv(transLoc, false, translation);
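Note that this array is already in the flat, column-major order gl.uniformMatrix4fv expects, so x, y, and z land in elements 12, 13, and 14. As a small sketch, you could wrap it in a helper (makeTranslation is just an illustrative name, not something from MV.js):
function makeTranslation(x, y, z) {
  // column-major 4x4 translation matrix, ready for gl.uniformMatrix4fv
  return [
    1, 0, 0, 0,
    0, 1, 0, 0,
    0, 0, 1, 0,
    x, y, z, 1
  ];
}
// e.g. place one cube one unit to the right of the origin:
gl.uniformMatrix4fv(transLoc, false, makeTranslation(1, 0, 0));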
Optimizations:
Now also having said that, there are a few more optimizations/corrections you can make:
1) Since WebGL maintains its "state" until changed (keeps things bound/set/enabled/etc.), you can remove a lot of the repeated code in your render() method:
function render() {
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// set state
gl.useProgram(program);
gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
projLoc = gl.getUniformLocation(program, "projectionMatrix");
loc = gl.getUniformLocation(program, "rotate");
gl.uniformMatrix4fv(projLoc, false, projection);
// draw shape 1
gl.uniformMatrix4fv(loc, false, projection);
gl.drawElements(gl.LINES, dVecIdx.length, gl.UNSIGNED_SHORT, 0);
// draw shape 2
gl.uniformMatrix4fv(loc, false, rotation);
gl.drawElements(gl.LINES, dVecIdx.length, gl.UNSIGNED_SHORT, 0);
requestAnimFrame(render);
}
2) If you don't want to use a specific matrix during rendering you should set it to the identity matrix which doesn't change other matrices/vectors when multiplied:
var identity = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
];
This is what you should use for the rotation matrix on your first shape instead of the projection matrix:
// draw shape 1
gl.uniformMatrix4fv(loc, false, identity);
gl.drawElements(gl.LINES, dVecIdx.length, gl.UNSIGNED_SHORT, 0);
3) You can declare projLoc, rotLoc, and transLoc as global variables and set their values as soon as the program is initialized. They won't change for a single program and don't need to be looked up again in the render loop.
program = initShaders(gl, "vertex-shader", "fragment-shader");
projLoc = gl.getUniformLocation(program, "projectionMatrix");
rotLoc = gl.getUniformLocation(program, "rotate");
transLoc = gl.getUniformLocation(program, "translate");
Making the final code:
var gl,program,canvas;
var vBuffer, vPosition;
var idxBuffer;
var projLoc, rotLoc, transLoc;
var vertices = [
-0.5, 0.5, 1,
0.5, 0.5, 1,
0.5, -0.5, 1,
-0.5, -0.5, 1,
-0.5, 0.5, 0,
0.5, 0.5, 0,
0.5, -0.5, 0,
-0.5, -0.5, 0
];
var dVecIdx = new Uint16Array([
0, 1, 1, 2,
2, 3, 3, 0,
4, 5, 5, 6,
6, 7, 7, 4,
0, 4, 1, 5,
2, 6, 3, 7
]);
var identity = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
];
var projection = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 1,
0, 0, 0, 1
];
var a = Math.sqrt(0.5);
var rotation = [
a, 0, a, 0,
0, 1, 0, 0,
-a, 0, a, 0,
0, 0, 0, 1
];
// actual translations are set in the render() function
var translation = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
];
window.onload = function init() {
canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) { alert("WebGL isn't available"); }
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(1.0, 1.0, 1.0, 1.0);
gl.enable(gl.DEPTH_TEST);
program = initShaders(gl, "vertex-shader", "fragment-shader");
projLoc = gl.getUniformLocation(program, "projectionMatrix");
rotLoc = gl.getUniformLocation(program, "rotate");
transLoc = gl.getUniformLocation(program, "translate");
vBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices), gl.STATIC_DRAW);
vPosition = gl.getAttribLocation(program, "vPosition");
gl.enableVertexAttribArray(vPosition);
gl.vertexAttribPointer(vPosition, 3, gl.FLOAT, false, 0, 0);
idxBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, idxBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, dVecIdx, gl.STATIC_DRAW);
render();
}
function render() {
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// set non-changing states
gl.useProgram(program);
gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
gl.uniformMatrix4fv(projLoc, false, projection);
// draw shape 1
translation[12] = 1; // x-axis translation (y and z are 0)
gl.uniformMatrix4fv(transLoc, false, translation);
gl.uniformMatrix4fv(rotLoc, false, identity);
gl.drawElements(gl.LINES, dVecIdx.length, gl.UNSIGNED_SHORT, 0);
// draw shape 2
translation[12] = -1; // set x-axis translation
gl.uniformMatrix4fv(transLoc, false, translation);
gl.uniformMatrix4fv(rotLoc, false, rotation);
gl.drawElements(gl.LINES, dVecIdx.length, gl.UNSIGNED_SHORT, 0);
requestAnimFrame(render);
}
<html>
<head>
<script src="https://www.cs.unm.edu/~angel/WebGL/7E/Common/initShaders.js"></script>
<script src="https://www.cs.unm.edu/~angel/WebGL/7E/Common/MV.js"></script>
<script src="https://www.cs.unm.edu/~angel/WebGL/7E/Common/webgl-utils.js"></script>
<script id="vertex-shader" type="x-shader/x-vertex">
attribute vec3 vPosition;
attribute vec4 vColor;
varying vec4 fColor;
uniform mat4 projectionMatrix;
uniform mat4 rotate;
uniform mat4 translate;
void main() {
gl_Position = projectionMatrix * translate * rotate * vec4(vPosition, 1);
fColor = vColor;
}
</script>
<script id="fragment-shader" type="x-shader/x-fragment">
precision mediump float;
varying vec4 fColor;
void main() {
gl_FragColor = fColor;
}
</script>
</head>
<body>
<div style="border: 1px dotted black;">
<div style="text-align:center">
<canvas id="gl-canvas" width="500" height="500"></canvas>
</div>
</div>
</body>
</html>
4) If you want to use your MV.js script you can also declare your matrices as mat4() objects and use mult() to multiply the matrices on the CPU before transferring data to the GPU (one multiplication per shape instead of one per vertex). You can also use it to create more versatile and accurate camera matrices:
var persp = perspective(30.0, 1, 0.1, 100); // fovy, aspect, near, far
var view = lookAt([0, 0, 5], [0, 0, 0], [0, 1, 0]); // eye, look, up
var projection2D = mult(persp, view);
var projection = []; // convert to 1D array
for(var i = 0; i < projection2D.length; i++) {
projection = projection.concat(projection2D[i]);
}
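As a rough sketch of that CPU-side approach (this assumes MV.js's translate(), rotate(), mult(), and flatten() helpers, plus a single hypothetical combined "model" uniform in place of the separate translate/rotate uniforms; flatten() converts a mat4 into the flat Float32Array layout gl.uniformMatrix4fv expects, the same flatten() already used for the vertex data above):
// sketch only: "model" is a hypothetical uniform, not one of the shaders' uniforms above
var modelLoc = gl.getUniformLocation(program, "model");
var model = mult(translate(1, 0, 0), rotate(45, [0, 1, 0])); // one multiply per shape, on the CPU
gl.uniformMatrix4fv(modelLoc, false, flatten(model));
// the combined projection above can be uploaded the same way:
// gl.uniformMatrix4fv(projLoc, false, flatten(projection2D));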
Hope this is helpful! Cheers!
Related
I was going through this website when I came across the following code in the snippet.
I got lost in the code where positionBuffer was created and gl.ARRAY_BUFFER was bound to it. It seems that no data was loaded into the newly created buffer; instead, the buffer was bound to the attribute a_position. Later on, a new buffer texCoordBuffer was created and some data was written to it. In the results, it seems like this same data had been loaded by WebGL into the a_position attribute as well. My doubt is: how did this happen? When a new buffer was created and the gl.ARRAY_BUFFER binding point was pointed at the new buffer, how did positionBuffer get the same data?
// WebGL2 - 2D image 3x3 convolution
// from https://webgl2fundamentals.org/webgl/webgl-2d-image-3x3-convolution.html
"use strict";
var vertexShaderSource = `#version 300 es
// an attribute is an input (in) to a vertex shader.
// It will receive data from a buffer
in vec2 a_position;
in vec2 a_texCoord;
// Used to pass in the resolution of the canvas
uniform vec2 u_resolution;
// Used to pass the texture coordinates to the fragment shader
out vec2 v_texCoord;
// all shaders have a main function
void main() {
// convert the position from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
// pass the texCoord to the fragment shader
// The GPU will interpolate this value between points.
v_texCoord = a_texCoord;
}
`;
var fragmentShaderSource = `#version 300 es
// fragment shaders don't have a default precision so we need
// to pick one. highp is a good default. It means "high precision"
precision highp float;
// our texture
uniform sampler2D u_image;
// the convolution kernal data
uniform float u_kernel[9];
uniform float u_kernelWeight;
// the texCoords passed in from the vertex shader.
in vec2 v_texCoord;
// we need to declare an output for the fragment shader
out vec4 outColor;
void main() {
vec2 onePixel = vec2(1) / vec2(textureSize(u_image, 0));
vec4 colorSum =
texture(u_image, v_texCoord + onePixel * vec2(-1, -1)) * u_kernel[0] +
texture(u_image, v_texCoord + onePixel * vec2( 0, -1)) * u_kernel[1] +
texture(u_image, v_texCoord + onePixel * vec2( 1, -1)) * u_kernel[2] +
texture(u_image, v_texCoord + onePixel * vec2(-1, 0)) * u_kernel[3] +
texture(u_image, v_texCoord + onePixel * vec2( 0, 0)) * u_kernel[4] +
texture(u_image, v_texCoord + onePixel * vec2( 1, 0)) * u_kernel[5] +
texture(u_image, v_texCoord + onePixel * vec2(-1, 1)) * u_kernel[6] +
texture(u_image, v_texCoord + onePixel * vec2( 0, 1)) * u_kernel[7] +
texture(u_image, v_texCoord + onePixel * vec2( 1, 1)) * u_kernel[8] ;
outColor = vec4((colorSum / u_kernelWeight).rgb, 1);
}
`;
var image = new Image();
image.src = "https://webgl2fundamentals.org/webgl/resources/leaves.jpg"; // MUST BE SAME DOMAIN!!!
image.onload = function() {
render(image);
};
function render(image) {
// Get A WebGL context
/** @type {HTMLCanvasElement} */
var canvas = document.querySelector("#canvas");
var gl = canvas.getContext("webgl2");
if (!gl) {
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromSources(gl,
[vertexShaderSource, fragmentShaderSource]);
// look up where the vertex data needs to go.
var positionAttributeLocation = gl.getAttribLocation(program, "a_position");
var texCoordAttributeLocation = gl.getAttribLocation(program, "a_texCoord");
// lookup uniforms
var resolutionLocation = gl.getUniformLocation(program, "u_resolution");
var imageLocation = gl.getUniformLocation(program, "u_image");
var kernelLocation = gl.getUniformLocation(program, "u_kernel[0]");
var kernelWeightLocation = gl.getUniformLocation(program, "u_kernelWeight");
// Create a vertex array object (attribute state)
var vao = gl.createVertexArray();
// and make it the one we're currently working with
gl.bindVertexArray(vao);
// Create a buffer and put a single pixel space rectangle in
// it (2 triangles)
var positionBuffer = gl.createBuffer();
// Turn on the attribute
gl.enableVertexAttribArray(positionAttributeLocation);
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Tell the attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionAttributeLocation, size, type, normalize, stride, offset);
// provide texture coordinates for the rectangle.
var texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0,
]), gl.STATIC_DRAW);
// Turn on the attribute
gl.enableVertexAttribArray(texCoordAttributeLocation);
// Tell the attribute how to get data out of texCoordBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
texCoordAttributeLocation, size, type, normalize, stride, offset);
// Create a texture.
var texture = gl.createTexture();
// make unit 0 the active texture unit
// (i.e., the unit all other texture commands will affect)
gl.activeTexture(gl.TEXTURE0 + 0);
// Bind it to texture unit 0's 2D bind point
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we don't need mips and so we're not filtering
// and we don't repeat at the edges.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
var mipLevel = 0; // the largest mip
var internalFormat = gl.RGBA; // format we want in the texture
var srcFormat = gl.RGBA; // format of data we are supplying
var srcType = gl.UNSIGNED_BYTE; // type of data we are supplying
gl.texImage2D(gl.TEXTURE_2D,
mipLevel,
internalFormat,
srcFormat,
srcType,
image);
// Bind the position buffer so gl.bufferData that will be called
// in setRectangle puts data in the position buffer
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Set a rectangle the same size as the image.
setRectangle(gl, 0, 0, image.width, image.height);
// Define several convolution kernels
var kernels = {
normal: [
0, 0, 0,
0, 1, 0,
0, 0, 0,
],
gaussianBlur: [
0.045, 0.122, 0.045,
0.122, 0.332, 0.122,
0.045, 0.122, 0.045,
],
gaussianBlur2: [
1, 2, 1,
2, 4, 2,
1, 2, 1,
],
gaussianBlur3: [
0, 1, 0,
1, 1, 1,
0, 1, 0,
],
unsharpen: [
-1, -1, -1,
-1, 9, -1,
-1, -1, -1,
],
sharpness: [
0, -1, 0,
-1, 5, -1,
0, -1, 0,
],
sharpen: [
-1, -1, -1,
-1, 16, -1,
-1, -1, -1,
],
edgeDetect: [
-0.125, -0.125, -0.125,
-0.125, 1, -0.125,
-0.125, -0.125, -0.125,
],
edgeDetect2: [
-1, -1, -1,
-1, 8, -1,
-1, -1, -1,
],
edgeDetect3: [
-5, 0, 0,
0, 0, 0,
0, 0, 5,
],
edgeDetect4: [
-1, -1, -1,
0, 0, 0,
1, 1, 1,
],
edgeDetect5: [
-1, -1, -1,
2, 2, 2,
-1, -1, -1,
],
edgeDetect6: [
-5, -5, -5,
-5, 39, -5,
-5, -5, -5,
],
sobelHorizontal: [
1, 2, 1,
0, 0, 0,
-1, -2, -1,
],
sobelVertical: [
1, 0, -1,
2, 0, -2,
1, 0, -1,
],
previtHorizontal: [
1, 1, 1,
0, 0, 0,
-1, -1, -1,
],
previtVertical: [
1, 0, -1,
1, 0, -1,
1, 0, -1,
],
boxBlur: [
0.111, 0.111, 0.111,
0.111, 0.111, 0.111,
0.111, 0.111, 0.111,
],
triangleBlur: [
0.0625, 0.125, 0.0625,
0.125, 0.25, 0.125,
0.0625, 0.125, 0.0625,
],
emboss: [
-2, -1, 0,
-1, 1, 1,
0, 1, 2,
],
};
var initialSelection = 'edgeDetect2';
// Setup UI to pick kernels.
var ui = document.querySelector("#ui");
var select = document.createElement("select");
for (var name in kernels) {
var option = document.createElement("option");
option.value = name;
if (name === initialSelection) {
option.selected = true;
}
option.appendChild(document.createTextNode(name));
select.appendChild(option);
}
select.onchange = function() {
drawWithKernel(this.options[this.selectedIndex].value);
};
ui.appendChild(select);
drawWithKernel(initialSelection);
function computeKernelWeight(kernel) {
var weight = kernel.reduce(function(prev, curr) {
return prev + curr;
});
return weight <= 0 ? 1 : weight;
}
function drawWithKernel(name) {
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Clear the canvas
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Bind the attribute/buffer set we want.
gl.bindVertexArray(vao);
// Pass in the canvas resolution so we can convert from
// pixels to clipspace in the shader
gl.uniform2f(resolutionLocation, gl.canvas.width, gl.canvas.height);
// Tell the shader to get the texture from texture unit 0
gl.uniform1i(imageLocation, 0);
// set the kernel and its weight
gl.uniform1fv(kernelLocation, kernels[name]);
gl.uniform1f(kernelWeightLocation, computeKernelWeight(kernels[name]));
// Draw the rectangle.
var primitiveType = gl.TRIANGLES;
var offset = 0;
var count = 6;
gl.drawArrays(primitiveType, offset, count);
}
}
function setRectangle(gl, x, y, width, height) {
var x1 = x;
var x2 = x + width;
var y1 = y;
var y2 = y + height;
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
x1, y1,
x2, y1,
x1, y2,
x1, y2,
x2, y1,
x2, y2,
]), gl.STATIC_DRAW);
}
@import url("https://webgl2fundamentals.org/webgl/resources/webgl-tutorials.css");
body {
margin: 0;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
<canvas id="canvas"></canvas>
<div id="uiContainer">
<div id="ui"></div>
</div>
<!--
for most samples webgl-utils only provides shader compiling/linking and
canvas resizing because why clutter the examples with code that's the same in every sample.
See https://webgl2fundamentals.org/webgl/lessons/webgl-boilerplate.html
and https://webgl2fundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
for webgl-utils, m3, m4, and webgl-lessons-ui.
-->
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
Having spent some more time on it, I see that a_position is an attribute that takes the pixel coordinates of the canvas, while a_texCoord takes the texture coordinates (0 to 1) of the texture.
However, further down in the code (#181),
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
the ARRAY_BUFFER binding point is attached to positionBuffer again, and then the setRectangle() function uploads data into that buffer, which is what feeds the attribute a_position.
To add more to the answer: the reason the line (#115),
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
is placed near the top, while the vertex array object vao is being defined and bound, is that gl.vertexAttribPointer() needs a buffer bound to ARRAY_BUFFER so that the current ARRAY_BUFFER can be attached to the attribute it is working on.
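In other words, gl.vertexAttribPointer() records whichever buffer is bound to ARRAY_BUFFER at that moment as the data source for the attribute; rebinding ARRAY_BUFFER later does not change that association. A minimal sketch of the flow, reusing the names from the snippet above:
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.vertexAttribPointer(positionAttributeLocation, 2, gl.FLOAT, false, 0, 0);
// a_position is now tied to positionBuffer, no matter what gets bound afterwards
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0);
// a_texCoord is tied to texCoordBuffer; a_position still reads from positionBuffer
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
setRectangle(gl, 0, 0, image.width, image.height); // the bufferData inside fills positionBuffer only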
This is the prelude/simple example to kickstart my bigger idea.
Question: How can we deform the cube's vertices using a sine wave while the cube is scaling, translating, or rotating?
Note: Maybe there's some post-processing effect I'm not aware of for this, and therefore animating vertices isn't the best approach.
Note 2: My final goal is to push music/audio through the geometry/mesh so it has more of an effect like so:
Just to clarify, I would like the effect of the image above, and I would also like it to be animated and be a piece of 3D geometry, not a 2D rasterized image.
But I fear adding this audio feature is too much for one question.
That being said, here's a cube being translated, scaled, and rotated. The cube has a light source using normals and color:
var gl,
shaderProgram,
vertices,
matrix = mat4.create(),
vertexCount,
indexCount,
q = quat.create(),
translate = [-3, 0, -10],
scale = [1, 1, 1],
pivot = [0, 0, 0],
translate2 = [0, 0, -8],
scale2 = [3, 3, 3],
pivot2 = [1, 1, 1];
initGL();
createShaders();
createVertices();
draw();
function initGL() {
var canvas = document.getElementById("canvas");
gl = canvas.getContext("webgl");
gl.enable(gl.DEPTH_TEST);
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(1, 1, 1, 1);
}
function createShaders() {
var vertexShader = getShader(gl, "shader-vs");
var fragmentShader = getShader(gl, "shader-fs");
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
gl.useProgram(shaderProgram);
}
function createVertices() {
vertices = [
[-1, -1, -1, 1, 0, 0, 1], // 0
[ 1, -1, -1, 1, 1, 0, 1], // 1
[-1, 1, -1, 0, 1, 1, 1], // 2
[ 1, 1, -1, 0, 0, 1, 1], // 3
[-1, 1, 1, 1, 0.5, 0, 1], // 4
[1, 1, 1, 0.5, 1, 1, 1], // 5
[-1, -1, 1, 1, 0, 0.5, 1], // 6
[1, -1, 1, 0.5, 0, 1, 1], // 7
];
var normals = [
[0, 0, 1], [0, 1, 0], [0, 0, -1],
[0, -1, 0], [-1, 0, 0], [1, 0, 0] ];
var indices = [
[0, 1, 2, 1, 2, 3],
[2, 3, 4, 3, 4, 5],
[4, 5, 6, 5, 6, 7],
[6, 7, 0, 7, 0, 1],
[0, 2, 6, 2, 6, 4],
[1, 3, 7, 3, 7, 5]
];
var attributes = []
for(let side=0; side < indices.length; ++side) {
for(let vi=0; vi < indices[side].length; ++vi) {
attributes.push(...vertices[indices[side][vi]]);
attributes.push(...normals[side]);
}
}
vertexCount = attributes.length / 10;
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(attributes), gl.STATIC_DRAW);
var coords = gl.getAttribLocation(shaderProgram, "coords");
gl.vertexAttribPointer(coords, 3, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 10, 0);
gl.enableVertexAttribArray(coords);
var colorsLocation = gl.getAttribLocation(shaderProgram, "colors");
gl.vertexAttribPointer(colorsLocation, 4, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 10, Float32Array.BYTES_PER_ELEMENT * 3);
gl.enableVertexAttribArray(colorsLocation);
var normalLocation = gl.getAttribLocation(shaderProgram, "normal");
gl.vertexAttribPointer(normalLocation, 3, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 10, Float32Array.BYTES_PER_ELEMENT * 7);
gl.enableVertexAttribArray(normalLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
var lightColor = gl.getUniformLocation(shaderProgram, "lightColor");
gl.uniform3f(lightColor, 1, 1, 1);
var lightDirection = gl.getUniformLocation(shaderProgram, "lightDirection");
gl.uniform3f(lightDirection, 0.5, 0.5, -1);
var perspectiveMatrix = mat4.create();
mat4.perspective(perspectiveMatrix, 1, canvas.width / canvas.height, 0.1, 11);
var perspectiveLoc = gl.getUniformLocation(shaderProgram, "perspectiveMatrix");
gl.uniformMatrix4fv(perspectiveLoc, false, perspectiveMatrix);
}
function draw(timeMs) {
requestAnimationFrame(draw);
let interval = timeMs / 3000
let t = interval - Math.floor(interval);
let trans_t = vec3.lerp([], translate, translate2, t);
let scale_t = vec3.lerp([], scale, scale2, t);
let pivot_t = vec3.lerp([], pivot, pivot2, t);
let quat_t = quat.slerp(quat.create(), q, [1,0,1,1], t /2);
mat4.fromRotationTranslationScaleOrigin(matrix, quat_t, trans_t, scale_t, pivot_t);
var transformMatrix = gl.getUniformLocation(shaderProgram, "transformMatrix");
gl.uniformMatrix4fv(transformMatrix, false, matrix);
gl.clear(gl.COLOR_BUFFER_BIT);
//gl.drawElements(gl.TRIANGLES, indexCount, gl.UNSIGNED_BYTE, 0);
gl.drawArrays(gl.TRIANGLES, 0, vertexCount);
}
/*
* https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Adding_2D_content_to_a_WebGL_context
*/
function getShader(gl, id) {
var shaderScript, theSource, currentChild, shader;
shaderScript = document.getElementById(id);
if (!shaderScript) {
return null;
}
theSource = "";
currentChild = shaderScript.firstChild;
while (currentChild) {
if (currentChild.nodeType == currentChild.TEXT_NODE) {
theSource += currentChild.textContent;
}
currentChild = currentChild.nextSibling;
}
if (shaderScript.type == "x-shader/x-fragment") {
shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (shaderScript.type == "x-shader/x-vertex") {
shader = gl.createShader(gl.VERTEX_SHADER);
} else {
// Unknown shader type
return null;
}
gl.shaderSource(shader, theSource);
// Compile the shader program
gl.compileShader(shader);
// See if it compiled successfully
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
return null;
}
return shader;
}
<canvas id="canvas" width="600" height="600"></canvas>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.3.2/gl-matrix-min.js"></script>
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec4 coords;
uniform mat4 transformMatrix;
attribute vec3 normal;
attribute vec4 colors;
uniform vec3 lightColor;
uniform vec3 lightDirection;
varying vec4 varyingColors;
uniform mat4 perspectiveMatrix;
void main(void) {
vec3 norm = normalize(normal);
vec3 ld = normalize(lightDirection);
float dotProduct = max(dot(norm, ld), 0.0);
vec3 vertexColor = lightColor * colors.rgb * dotProduct;
varyingColors = vec4(vertexColor, 1);
gl_Position = perspectiveMatrix * transformMatrix * coords;
}
</script>
<script id="shader-fs" type="x-shader/x-fragment">
precision mediump float;
uniform vec4 color;
varying vec4 varyingColors;
void main(void) {
gl_FragColor = varyingColors;
}
</script>
I think a reasonable way to do it is vertex displacement, that is, adding an offset to the vertex position.
To make it work you need to tessellate your cube or take some other mesh with a high polygon count (see the sketch just after the shader snippet below). Then you can tie your sine phase to the position.
Sort of:
float amplitude = 0.1;
vec3 offset = vec3(sin(globalTime + coords.y * 10.0), 0.0, 0.0) * amplitude;
gl_Position = perspectiveMatrix * transformMatrix * (coords + offset);
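Since the stock 8-corner cube has nothing between its corners to displace, here is a rough sketch (illustrative names, not code from the snippet above) of how one face could be subdivided into an n x n grid of triangles before uploading it; the other five faces would be built the same way:
function tessellateFace(n) {
  var positions = [];
  for (var row = 0; row < n; ++row) {
    for (var col = 0; col < n; ++col) {
      var x0 = -1 + 2 * col / n, x1 = -1 + 2 * (col + 1) / n;
      var y0 = -1 + 2 * row / n, y1 = -1 + 2 * (row + 1) / n;
      // two triangles per grid cell, all lying on the z = +1 face
      positions.push(x0, y0, 1, x1, y0, 1, x1, y1, 1);
      positions.push(x0, y0, 1, x1, y1, 1, x0, y1, 1);
    }
  }
  return new Float32Array(positions); // feed to gl.bufferData as usual
}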
For using audio as input, you can get a spectrum from the Web Audio API and use some bar's value as the amplitude. A low-frequency bar works well initially, since that's where kicks sound. Consider asking the API for low-detail frequency data (a few wide bars).
Also, at that point some spectrum filtering might be required to smooth the visual effect, for example interpolating the spectrum data across the last few frames.
Using multiple frequency bars as input can result in a nice equalizer effect. To make that work you can bake the bar index in as a geometry attribute and displace based on that bar's value.
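A rough sketch of the single-bar approach described above, using the standard Web Audio API AnalyserNode (amplitudeLoc is a hypothetical uniform location; the shader sketch above hardcodes amplitude, so you would promote it to a uniform, and hooking up the audio source is omitted):
var audioCtx = new AudioContext();
var analyser = audioCtx.createAnalyser();
analyser.fftSize = 64; // low-detail spectrum: 32 wide bars
// connect a source here, e.g. audioCtx.createMediaElementSource(audioElement).connect(analyser);
var bars = new Uint8Array(analyser.frequencyBinCount);
var smoothed = 0;
function updateAmplitude() {
  analyser.getByteFrequencyData(bars);
  var kick = bars[1] / 255;            // one low-frequency bar, normalized to 0..1
  smoothed += (kick - smoothed) * 0.2; // cheap smoothing across frames
  gl.uniform1f(amplitudeLoc, 0.1 + smoothed * 0.3);
}
Calling updateAmplitude() once per frame inside draw() would make the displacement follow the music.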
How can I add a directional light to a transforming cube in WebGL?
I know it requires normals, and I've added them in the snippet (I've commented them out).
It will also require some math in the vertex shader. Unfortunately the code doesn't work when I uncomment them.
attribute vec4 coords;
uniform mat4 transformMatrix;
attribute vec3 normal;
attribute vec4 colors;
uniform vec3 lightColor;
uniform vec3 lightDirection;
varying vec4 varyingColors;
uniform mat4 perspectiveMatrix;
void main(void) {
vec3 norm = normalize(normal);
vec3 ld = normalize(lightDirection);
float dotProduct = max(dot(norm, ld), 0.0);
vec3 vertexColor = lightColor * colors * dotProduct;
varyingColors = vec4(vertexColor, 1);
gl_Position = perspectiveMatrix * transformMatrix * coords;
}
var gl,
shaderProgram,
vertices,
matrix = mat4.create(),
vertexCount,
indexCount,
q = quat.create(),
translate = [-3, 0, -10],
scale = [1, 1, 1],
pivot = [0, 0, 0],
translate2 = [0, 0, -8],
scale2 = [3, 3, 3],
pivot2 = [1, 1, 1];
initGL();
createShaders();
createVertices();
createIndices();
draw();
function initGL() {
var canvas = document.getElementById("canvas");
gl = canvas.getContext("webgl");
gl.enable(gl.DEPTH_TEST);
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(1, 1, 1, 1);
}
function createShaders() {
var vertexShader = getShader(gl, "shader-vs");
var fragmentShader = getShader(gl, "shader-fs");
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
gl.useProgram(shaderProgram);
}
function createVertices() {
vertices = [
-1, -1, -1, 1, 0, 0, 1, // 0
1, -1, -1, 1, 1, 0, 1, // 1
-1, 1, -1, 0, 1, 1, 1, // 2
1, 1, -1, 0, 0, 1, 1, // 3
-1, 1, 1, 1, 0.5, 0, 1, // 4
1, 1, 1, 0.5, 1, 1, 1, // 5
-1, -1, 1, 1, 0, 0.5, 1, // 6
1, -1, 1, 0.5, 0, 1, 1, // 7
];
vertexCount = vertices.length / 7;
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
var coords = gl.getAttribLocation(shaderProgram, "coords");
gl.vertexAttribPointer(coords, 3, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 7, 0);
gl.enableVertexAttribArray(coords);
/**
var normals = [
0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1,
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1,
0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0,
-1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0,
1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0
];
var normalBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, normalBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(normals), gl.STATIC_DRAW);
var normalLocation = gl.getAttribLocation(shaderProgram, "normal");
gl.vertexAttribPointer(normalLocation, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(normalLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
var lightColor = gl.getUniformLocation(shaderProgram, "lightColor");
gl.uniform3f(lightColor, 1, 1, 1);
var lightDirection = gl.getUniformLocation(shaderProgram, "lightDirection");
gl.uniform3f(lightDirection, 0.5, 1, 0);
*/
var colorsLocation = gl.getAttribLocation(shaderProgram, "colors");
gl.vertexAttribPointer(colorsLocation, 4, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 7, Float32Array.BYTES_PER_ELEMENT * 3);
gl.enableVertexAttribArray(colorsLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
var perspectiveMatrix = mat4.create();
mat4.perspective(perspectiveMatrix, 1, canvas.width / canvas.height, 0.1, 11);
var perspectiveLoc = gl.getUniformLocation(shaderProgram, "perspectiveMatrix");
gl.uniformMatrix4fv(perspectiveLoc, false, perspectiveMatrix);
}
function createIndices() {
var indices = [
0, 1, 2, 1, 2, 3,
2, 3, 4, 3, 4, 5,
4, 5, 6, 5, 6, 7,
6, 7, 0, 7, 0, 1,
0, 2, 6, 2, 6, 4,
1, 3, 7, 3, 7, 5
];
indexCount = indices.length;
var indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices), gl.STATIC_DRAW);
}
function draw(timeMs) {
requestAnimationFrame(draw);
let interval = timeMs / 3000
let t = interval - Math.floor(interval);
let trans_t = vec3.lerp([], translate, translate2, t);
let scale_t = vec3.lerp([], scale, scale2, t);
let pivot_t = vec3.lerp([], pivot, pivot2, t);
mat4.fromRotationTranslationScaleOrigin(matrix, q, trans_t, scale_t, pivot_t);
var transformMatrix = gl.getUniformLocation(shaderProgram, "transformMatrix");
gl.uniformMatrix4fv(transformMatrix, false, matrix);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawElements(gl.TRIANGLES, indexCount, gl.UNSIGNED_BYTE, 0);
}
/*
* https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Adding_2D_content_to_a_WebGL_context
*/
function getShader(gl, id) {
var shaderScript, theSource, currentChild, shader;
shaderScript = document.getElementById(id);
if (!shaderScript) {
return null;
}
theSource = "";
currentChild = shaderScript.firstChild;
while (currentChild) {
if (currentChild.nodeType == currentChild.TEXT_NODE) {
theSource += currentChild.textContent;
}
currentChild = currentChild.nextSibling;
}
if (shaderScript.type == "x-shader/x-fragment") {
shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (shaderScript.type == "x-shader/x-vertex") {
shader = gl.createShader(gl.VERTEX_SHADER);
} else {
// Unknown shader type
return null;
}
gl.shaderSource(shader, theSource);
// Compile the shader program
gl.compileShader(shader);
// See if it compiled successfully
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
return null;
}
return shader;
}
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec4 coords;
attribute float pointSize;
uniform mat4 transformMatrix;
attribute vec4 colors;
varying vec4 varyingColors;
uniform mat4 perspectiveMatrix;
void main(void) {
gl_Position = perspectiveMatrix * transformMatrix * coords;
gl_PointSize = pointSize;
varyingColors = colors;
}
</script>
<script id="shader-fs" type="x-shader/x-fragment">
precision mediump float;
uniform vec4 color;
varying vec4 varyingColors;
void main(void) {
gl_FragColor = varyingColors;
}
</script>
<canvas id="canvas" width="600" height="600"></canvas>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.3.2/gl-matrix-min.js"></script>
There is an issue in your shader code:
vec3 vertexColor = lightColor * colors * dotProduct;
The type of colors is vec4, but the type of vertexColor is vec3, so it has to be:
vec3 vertexColor = lightColor * colors.rgb * dotProduct;
You can't assign an expression of type vec4 to a variable of type vec3.
When gl.vertexAttribPointer is called, the proper vertex buffer has to be bound beforehand. At the point where you specify the colorsLocation attribute, the buffer with the normals is bound, rather than the buffer with the vertices and colors (vertices).
A vertex coordinate and its attributes form a tuple. If a vertex coordinate is used twice (or 3 times, on three sides) and is associated with a different normal vector each time, then the coordinate has to be "duplicated". Each vertex coordinate is associated with exactly 1 normal vector.
See Rendering meshes with multiple indices.
Imagine a 3-dimensional vertex coordinate and a 3-dimensional normal vector as one combined 6-dimensional attribute.
Create a linearised array of attributes:
vertices = [
[-1, -1, -1, 1, 0, 0, 1], // 0
[ 1, -1, -1, 1, 1, 0, 1], // 1
[-1, 1, -1, 0, 1, 1, 1], // 2
[ 1, 1, -1, 0, 0, 1, 1], // 3
[-1, 1, 1, 1, 0.5, 0, 1], // 4
[1, 1, 1, 0.5, 1, 1, 1], // 5
[-1, -1, 1, 1, 0, 0.5, 1], // 6
[1, -1, 1, 0.5, 0, 1, 1], // 7
];
var normals = [
[0, 0, 1], [0, 1, 0], [0, 0, -1],
[0, -1, 0], [-1, 0, 0], [1, 0, 0] ];
var indices = [
[0, 1, 2, 1, 2, 3],
[2, 3, 4, 3, 4, 5],
[4, 5, 6, 5, 6, 7],
[6, 7, 0, 7, 0, 1],
[0, 2, 6, 2, 6, 4],
[1, 3, 7, 3, 7, 5]
];
var attributes = []
for(let side=0; side < indices.length; ++side) {
for(let vi=0; vi < indices[side].length; ++vi) {
attributes.push(...vertices[indices[side][vi]]);
attributes.push(...normals[side]);
}
}
vertexCount = attributes.length / 10;
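For example, the very first record in attributes (10 floats: 3 position, 4 color, 3 normal) is vertex 0 of the first side followed by that side's normal:
// vertices[0] -> -1, -1, -1, 1, 0, 0, 1 (position + color)
// normals[0]  ->  0, 0, 1 (normal of side 0)
// first 10 floats pushed: -1, -1, -1, 1, 0, 0, 1, 0, 0, 1
The same corner (index 0) appears again on sides 3 and 4, each time paired with a different normal, which is why it has to be duplicated.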
Create the buffer and define the generic arrays of vertex attributes:
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(attributes), gl.STATIC_DRAW);
var coords = gl.getAttribLocation(shaderProgram, "coords");
gl.vertexAttribPointer(coords, 3, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 10, 0);
gl.enableVertexAttribArray(coords);
var colorsLocation = gl.getAttribLocation(shaderProgram, "colors");
gl.vertexAttribPointer(colorsLocation, 4, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 10, Float32Array.BYTES_PER_ELEMENT * 3);
gl.enableVertexAttribArray(colorsLocation);
var normalLocation = gl.getAttribLocation(shaderProgram, "normal");
gl.vertexAttribPointer(normalLocation, 3, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 10, Float32Array.BYTES_PER_ELEMENT * 7);
gl.enableVertexAttribArray(normalLocation);
Use .drawArrays rather than .drawElements to draw the mesh:
//gl.drawElements(gl.TRIANGLES, indexCount, gl.UNSIGNED_BYTE, 0);
gl.drawArrays(gl.TRIANGLES, 0, vertexCount);
See the example, where I applied the suggestions to your code:
var gl,
shaderProgram,
vertices,
matrix = mat4.create(),
vertexCount,
indexCount,
q = quat.create(),
translate = [-3, 0, -10],
scale = [1, 1, 1],
pivot = [0, 0, 0],
translate2 = [0, 0, -8],
scale2 = [3, 3, 3],
pivot2 = [1, 1, 1];
initGL();
createShaders();
createVertices();
draw();
function initGL() {
var canvas = document.getElementById("canvas");
gl = canvas.getContext("webgl");
gl.enable(gl.DEPTH_TEST);
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(1, 1, 1, 1);
}
function createShaders() {
var vertexShader = getShader(gl, "shader-vs");
var fragmentShader = getShader(gl, "shader-fs");
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
gl.useProgram(shaderProgram);
}
function createVertices() {
vertices = [
[-1, -1, -1, 1, 0, 0, 1], // 0
[ 1, -1, -1, 1, 1, 0, 1], // 1
[-1, 1, -1, 0, 1, 1, 1], // 2
[ 1, 1, -1, 0, 0, 1, 1], // 3
[-1, 1, 1, 1, 0.5, 0, 1], // 4
[1, 1, 1, 0.5, 1, 1, 1], // 5
[-1, -1, 1, 1, 0, 0.5, 1], // 6
[1, -1, 1, 0.5, 0, 1, 1], // 7
];
var normals = [
[0, 0, 1], [0, 1, 0], [0, 0, -1],
[0, -1, 0], [-1, 0, 0], [1, 0, 0] ];
var indices = [
[0, 1, 2, 1, 2, 3],
[2, 3, 4, 3, 4, 5],
[4, 5, 6, 5, 6, 7],
[6, 7, 0, 7, 0, 1],
[0, 2, 6, 2, 6, 4],
[1, 3, 7, 3, 7, 5]
];
var attributes = []
for(let side=0; side < indices.length; ++side) {
for(let vi=0; vi < indices[side].length; ++vi) {
attributes.push(...vertices[indices[side][vi]]);
attributes.push(...normals[side]);
}
}
vertexCount = attributes.length / 10;
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(attributes), gl.STATIC_DRAW);
var coords = gl.getAttribLocation(shaderProgram, "coords");
gl.vertexAttribPointer(coords, 3, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 10, 0);
gl.enableVertexAttribArray(coords);
var colorsLocation = gl.getAttribLocation(shaderProgram, "colors");
gl.vertexAttribPointer(colorsLocation, 4, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 10, Float32Array.BYTES_PER_ELEMENT * 3);
gl.enableVertexAttribArray(colorsLocation);
var normalLocation = gl.getAttribLocation(shaderProgram, "normal");
gl.vertexAttribPointer(normalLocation, 3, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 10, Float32Array.BYTES_PER_ELEMENT * 7);
gl.enableVertexAttribArray(normalLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
var lightColor = gl.getUniformLocation(shaderProgram, "lightColor");
gl.uniform3f(lightColor, 1, 1, 1);
var lightDirection = gl.getUniformLocation(shaderProgram, "lightDirection");
gl.uniform3f(lightDirection, 0.5, 0.5, -1);
var perspectiveMatrix = mat4.create();
mat4.perspective(perspectiveMatrix, 1, canvas.width / canvas.height, 0.1, 11);
var perspectiveLoc = gl.getUniformLocation(shaderProgram, "perspectiveMatrix");
gl.uniformMatrix4fv(perspectiveLoc, false, perspectiveMatrix);
}
function draw(timeMs) {
requestAnimationFrame(draw);
let interval = timeMs / 3000
let t = interval - Math.floor(interval);
let trans_t = vec3.lerp([], translate, translate2, t);
let scale_t = vec3.lerp([], scale, scale2, t);
let pivot_t = vec3.lerp([], pivot, pivot2, t);
mat4.fromRotationTranslationScaleOrigin(matrix, q, trans_t, scale_t, pivot_t);
var transformMatrix = gl.getUniformLocation(shaderProgram, "transformMatrix");
gl.uniformMatrix4fv(transformMatrix, false, matrix);
gl.clear(gl.COLOR_BUFFER_BIT);
//gl.drawElements(gl.TRIANGLES, indexCount, gl.UNSIGNED_BYTE, 0);
gl.drawArrays(gl.TRIANGLES, 0, vertexCount);
}
/*
* https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Adding_2D_content_to_a_WebGL_context
*/
function getShader(gl, id) {
var shaderScript, theSource, currentChild, shader;
shaderScript = document.getElementById(id);
if (!shaderScript) {
return null;
}
theSource = "";
currentChild = shaderScript.firstChild;
while (currentChild) {
if (currentChild.nodeType == currentChild.TEXT_NODE) {
theSource += currentChild.textContent;
}
currentChild = currentChild.nextSibling;
}
if (shaderScript.type == "x-shader/x-fragment") {
shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (shaderScript.type == "x-shader/x-vertex") {
shader = gl.createShader(gl.VERTEX_SHADER);
} else {
// Unknown shader type
return null;
}
gl.shaderSource(shader, theSource);
// Compile the shader program
gl.compileShader(shader);
// See if it compiled successfully
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
return null;
}
return shader;
}
<canvas id="canvas" width="600" height="600"></canvas>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.3.2/gl-matrix-min.js"></script>
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec4 coords;
uniform mat4 transformMatrix;
attribute vec3 normal;
attribute vec4 colors;
uniform vec3 lightColor;
uniform vec3 lightDirection;
varying vec4 varyingColors;
uniform mat4 perspectiveMatrix;
void main(void) {
vec3 norm = normalize(normal);
vec3 ld = normalize(lightDirection);
float dotProduct = max(dot(norm, ld), 0.0);
vec3 vertexColor = lightColor * colors.rgb * dotProduct;
varyingColors = vec4(vertexColor, 1);
gl_Position = perspectiveMatrix * transformMatrix * coords;
}
</script>
<script id="shader-fs" type="x-shader/x-fragment">
precision mediump float;
uniform vec4 color;
varying vec4 varyingColors;
void main(void) {
gl_FragColor = varyingColors;
}
</script>
I am creating a simple WebGL program that draws a cube rotating around the Z axis. The problem is that while it is rotating, some sides override the others and you don't see each individual side of the cube. I looked for a solution online, but it made things worse: they enable face culling, and for them it seems to work, but unfortunately for me it doesn't. Can anyone tell me what I need to add to my code in order to fix the face overriding?
Full Source Code:
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
var a = 24;
var positions = [
-0.5, -0.5, 0.0,
0.5, -0.5, 0.0,
0.5, 0.5, 0.0,
-0.5, 0.5, 0.0,
-0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, 0.5,
-0.5, 0.5, 0.5,
//0.5, 0.5, 1.0,
];
var indices = [
0, 1, 2, 0, 2, 3, // Front
0, 4, 7, 0, 3, 7, // Side 1
1, 5, 6, 1, 2, 6, // Side 2
4, 5, 6, 6, 7, 4 // Back
];
var rotation_angle = 1;
var radians = (rotation_angle * Math.PI)/180;
var rotation = [Math.sin(radians), 0.0, Math.cos(radians)];
var _buffer = gl.createBuffer();
var i_buffer = gl.createBuffer();
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, `
precision mediump float;
attribute vec3 position;
uniform vec3 rotation;
varying vec3 f_col;
void main(){
vec2 newPos = vec2(
position.x * rotation.x - position.z * rotation.z,
position.x * rotation.z + position.z * rotation.x
);
gl_Position = vec4(newPos.x, position.y, newPos.y, 1.0);
f_col = position;
}
`);
gl.compileShader(vertexShader);
// Check if it compiled
var success2 = gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS);
if (!success2) {
// Something went wrong during compilation; get the error
throw "could not compile shader:" + gl.getShaderInfoLog(vertexShader);
}
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, `
precision mediump float;
varying vec3 f_col;
void main() {
gl_FragColor = vec4(f_col.x, 0.2, f_col.z, 1.0);
}
`);
gl.compileShader(fragmentShader);
var success1 = gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS);
if (!success1) {
// Something went wrong during compilation; get the error
throw "could not compile shader:" + gl.getShaderInfoLog(fragmentShader);
}
var program = gl.createProgram();
// Attach pre-existing shaders
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
var attributeLoc = gl.getAttribLocation(program, "position");
gl.bindBuffer(gl.ARRAY_BUFFER, _buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions),
gl.STATIC_DRAW);
gl.vertexAttribPointer(attributeLoc, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(attributeLoc);
gl.useProgram(program);
var rotation_location = gl.getUniformLocation(program, "rotation");
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, i_buffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices),
gl.STATIC_DRAW);
function loop() {
gl.clearColor(0.75, 0.85, 0.8, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
//gl.drawArrays(gl.TRIANGLE_FAN, 0, 8);
gl.drawElements(gl.TRIANGLES, a, gl.UNSIGNED_SHORT, 0);
//gl.drawArrays(gl.TRIANGLE_STRIP, 1, 3);
rotation_angle++;
var radians = (rotation_angle * Math.PI)/180;
var rotation = [Math.sin(radians), 0.0, Math.cos(radians)];
gl.uniform3fv(rotation_location, rotation);
requestAnimationFrame(loop);
}
loop();
<canvas id="canvas" width="500" height="500"></canvas>
I have been trying to understand framebuffers in WebGL/OpenGL ES.
I know that we can blend multiple textures using a framebuffer.
So, to understand that, I wrote a sample that takes a 1x1 texture and tries to apply the framebuffer logic on top of it.
But it didn't work.
See the snippet at the bottom; if you click on "Mix Red and blue", the image doesn't get rendered. Am I doing anything wrong?
Code :
var canvas, gl, attrPosition, texture, program, vertexBuffer, textureBuffer, vertices, texVertices, attrPos, attrTexPos, textures = [], framebuffers = [];
canvas = document.getElementById('canvas');
gl = getWebGL();
vertices = new Float32Array([
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0,
-1.0, 1.0,
-1.0, -1.0,
]);
texVertices = new Float32Array([
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
0.0, 0.0
]);
var getProgram = function () {
var vs = createVertexShader([
'attribute vec2 attrPos;',
'attribute vec2 attrTexPos;',
'varying highp vec2 vTexCoord;',
'void main() {',
'\tgl_Position = vec4(attrPos, 0.0, 1.0);',
'}'
].join('\n'));
var fs = createFragmentShader([
'varying highp vec2 vTexCoord;',
'uniform sampler2D uImage;',
'void main() {',
'\tgl_FragColor = texture2D(uImage, vTexCoord);',
'}'
].join('\n'));
return createAndLinkPrograms(vs, fs);
};
var render = function () {
gl.clear(gl.DEPTH_BUFFER_BIT|gl.COLOR_BUFFER_BIT);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.vertexAttribPointer(attrPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, textureBuffer);
gl.vertexAttribPointer(attrTexPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);
};
if (gl) {
gl.clearColor(0.1, 0.5, 1.0, 1.0);
render();
program = getProgram();
texture = createAndSetupTexture();
vertexBuffer = createAndBindBuffer(vertices, gl.ARRAY_BUFFER);
attrPos = gl.getUniformLocation(program, 'attrPos');
gl.enableVertexAttribArray(attrPos);
textureBuffer = createAndBindBuffer(texVertices, gl.ARRAY_BUFFER);
attrTexPos = gl.getUniformLocation(program, 'attrTexPos');
gl.enableVertexAttribArray(attrTexPos);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([123, 0, 60, 255]));
render();
}
var initPingPongTextures = function(textures, framebuffers) {
for (var i = 0; i < 2; ++i) {
var tex = createAndSetupTexture(gl);
textures.push(tex);
// make the texture the same size as the image
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
// Create a framebuffer
var fbo = gl.createFramebuffer();
framebuffers.push(fbo);
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
// Attach a texture to it.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
}
}
var setFramebuffer = function(fbo, width, height) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
gl.viewport(0, 0, width, height);
};
var mixRedAndBlue = function () {
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
setFramebuffer(framebuffers[0], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([255, 0, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[0]);
setFramebuffer(framebuffers[1], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 255, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[1]);
setFramebuffer(null, 1, 1);
render();
};
var getWebGLContext = function(canvas) {
var webglContextParams = ['webgl', 'experimental-webgl', 'webkit-3d', 'moz-webgl'];
var webglContext = null;
for (var index = 0; index < webglContextParams.length; index++) {
try {
webglContext = canvas.getContext(webglContextParams[index]);
if(webglContext) {
//breaking as we got our context
break;
}
} catch (E) {
console.log(E);
}
}
if(webglContext === null) {
alert('WebGL is not supported on your browser.');
} else {
//WebGL is supported in your browser, lets render the texture
}
fillGLForCleanUp(webglContext);
return webglContext;
}
var createVertexShader = function (vertexShaderSource) {
console.log(vertexShaderSource);
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderSource);
gl.compileShader(vertexShader);
return vertexShader;
}
var createFragmentShader = function (fragmentShaderSource) {
console.log(fragmentShaderSource);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderSource);
gl.compileShader(fragmentShader);
return fragmentShader;
}
var createAndLinkPrograms = function (vertexShader, fragmentShader) {
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
alert('Could not initialise shaders');
}
gl.useProgram(program);
return program;
}
var createAndBindBuffer = function (verticesOrIndices, bufferType) {
var buffer = gl.createBuffer();
gl.bindBuffer(bufferType, buffer);
gl.bufferData(bufferType, verticesOrIndices, gl.STATIC_DRAW);
//clear memory
gl.bindBuffer(bufferType, null);
return buffer;
}
var allowAllImageSizes = function() {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// gl.bindTexture(gl.TEXTURE_2D, null);
}
var createAndSetupTexture = function() {
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
allowAllImageSizes();
gl.textures.push(texture);
return texture;
}
var getWebGL = function (canvas, width, height) {
if(!canvas) {
canvas = document.createElement('canvas');
canvas.id = 'canvas';
canvas.width = !width ? 512 : width;
canvas.height = !height ? 512 : height;
document.body.appendChild(canvas);
} else {
canvas.width = !width ? 512 : width;
canvas.height = !height ? 512 : height;
}
return getWebGLContext(canvas);
}
var fillGLForCleanUp = function (gl) {
gl.textures = [];
gl.framebuffers = [];
gl.array_buffer = [];
gl.element_array_buffers = [];
}
var canvas, gl, attrPosition, texture, program,
vertexBuffer, textureBuffer, vertices, texVertices,
attrPos, attrTexPos, textures = [], framebuffers = [];
canvas = document.getElementById('canvas');
gl = getWebGL(canvas);
vertices = new Float32Array([
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0,
-1.0, 1.0,
-1.0, -1.0,
]);
texVertices = new Float32Array([
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
0.0, 0.0
]);
var getProgram = function () {
var vs = createVertexShader([
'attribute vec2 attrPos;',
'attribute vec2 attrTexPos;',
'varying highp vec2 vTexCoord;',
'void main() {',
'\tgl_Position = vec4(attrPos, 0.0, 1.0);',
'}'
].join('\n'));
var fs = createFragmentShader([
'varying highp vec2 vTexCoord;',
'uniform sampler2D uImage;',
'void main() {',
'\tgl_FragColor = texture2D(uImage, vTexCoord);',
'}'
].join('\n'));
return createAndLinkPrograms(vs, fs);
};
var render = function () {
gl.clear(gl.DEPTH_BUFFER_BIT|gl.COLOR_BUFFER_BIT);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.vertexAttribPointer(attrPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, textureBuffer);
gl.vertexAttribPointer(attrTexPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);
};
if (gl) {
gl.clearColor(0.1, 0.5, 1.0, 1.0);
render();
program = getProgram();
texture = createAndSetupTexture();
vertexBuffer = createAndBindBuffer(vertices, gl.ARRAY_BUFFER);
attrPos = gl.getUniformLocation(program, 'attrPos');
gl.enableVertexAttribArray(attrPos);
textureBuffer = createAndBindBuffer(texVertices, gl.ARRAY_BUFFER);
attrTexPos = gl.getUniformLocation(program, 'attrTexPos');
gl.enableVertexAttribArray(attrTexPos);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([123, 0, 60, 255]));
render();
}
var initPingPongTextures = function(textures, framebuffers) {
for (var i = 0; i < 2; ++i) {
var tex = createAndSetupTexture(gl);
textures.push(tex);
// make the texture the same size as the image
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
// Create a framebuffer
var fbo = gl.createFramebuffer();
framebuffers.push(fbo);
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
// Attach a texture to it.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
}
}
var setFramebuffer = function(fbo, width, height) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
gl.viewport(0, 0, width, height);
};
var mixRedAndBlue = function () {
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
setFramebuffer(framebuffers[0], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([255, 0, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[0]);
setFramebuffer(framebuffers[1], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 255, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[1]);
setFramebuffer(null, 1, 1);
render();
};
<button id="redImg" onclick="mixRedAndBlue()">Mix Red and blue</button><hr/>
<canvas id="canvas" width=512 height=512></canvas>
Edit 1:
I am trying to achieve the same thing with multiple programs and multiple fragment shaders, because having if/else statements within a fragment shader is not recommended, as the shader runs for each pixel.
Shaders.prototype.VS_Base = [
'attribute vec3 verticesPosition;',
'attribute vec2 texturePosition;',
'varying highp vec2 vTextureCoord;',
'void main(void) {',
'\tgl_Position = vec4(verticesPosition * vec3(1.0, -1.0, 1.0), 0.5);',
'\tvTextureCoord = texturePosition;',
'}'
].join('\n');
Shaders.prototype.FS_Base_Image_RED = [
'#ifdef GL_ES',
'precision highp float;',
'#endif',
'uniform sampler2D uImage;',
'varying highp vec2 vTextureCoord;',
'void main (void) {',
'\tgl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);//texture2D(uImage, vTextureCoord);',
'}'
].join('\n');
Shaders.prototype.FS_Base_Image_BLUE = [
'#ifdef GL_ES',
'precision highp float;',
'#endif',
'uniform sampler2D uImage;',
'varying highp vec2 vTextureCoord;',
'void main (void) {',
'\tgl_FragColor = vec4(0.0, 0.0, 1.0, 1.0);//texture2D(uImage, vTextureCoord);',
'}'
].join('\n');
Now I have 2 separate programs, one for each fragment shader, and I need to use framebuffers to mix red and blue. I am not looking for mix(), as the actual scenario is very complex; that's the reason I am using multiple programs with separate fragment shaders, to avoid conditional if/else statements.
It's not clear what you're trying to do. Framebuffers are just a list of attachments (textures and renderbuffers). You use them to render to a texture and/or renderbuffer. Then you can use the texture you just rendered to as input to some other render.
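Boiled down, the render-to-texture pattern looks like this. This is only a sketch; fb, texA, texWidth, texHeight, firstPassProgram and secondPassProgram are placeholder names, not anything from your code, and the buffers/attributes are assumed to be set up already:
// Pass 1: draw into a texture by binding the framebuffer it is attached to.
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);        // fb has texA attached as COLOR_ATTACHMENT0
gl.viewport(0, 0, texWidth, texHeight);
gl.useProgram(firstPassProgram);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Pass 2: draw to the canvas, sampling the texture produced by pass 1.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);      // null means "draw to the canvas"
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(secondPassProgram);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texA);           // the texture rendered in pass 1 is now an input
gl.drawArrays(gl.TRIANGLES, 0, 6);
The examples below build up to that in two steps: first blending two textures without any framebuffers, then producing the textures themselves by rendering into framebuffers.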
Here's an example with NO framebuffers. It blends 2 textures.
var vs = `
attribute vec4 position;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = position.xy * .5 + .5;
}
`;
var fs = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D tex1;
uniform sampler2D tex2;
void main() {
vec4 color1 = texture2D(tex1, v_texcoord);
vec4 color2 = texture2D(tex2, v_texcoord);
gl_FragColor = mix(color1, color2, 0.5);
}
`;
const gl = document.querySelector("canvas").getContext("webgl");
const program = twgl.createProgramFromSources(gl, [vs, fs]);
// make 2 textures with canvas 2d
const ctx = document.createElement("canvas").getContext("2d");
ctx.canvas.width = 64;
ctx.canvas.height = 64;
// first texture has a circle
ctx.fillStyle = "blue";
ctx.fillRect(0, 0, 64, 64);
ctx.strokeStyle = "yellow";
ctx.beginPath();
ctx.arc(32, 32, 20, 0, Math.PI * 2, false);
ctx.lineWidth = 12;
ctx.stroke();
const tex1 = createTextureFromCanvas(gl, ctx.canvas);
// second texture has a diamond (diagonal square)
ctx.fillStyle = "red";
ctx.fillRect(0, 0, 64, 64);
ctx.fillStyle = "cyan";
ctx.beginPath();
ctx.moveTo(32, 6);
ctx.lineTo(58, 32);
ctx.lineTo(32, 58);
ctx.lineTo(6, 32);
ctx.lineTo(32, 6);
ctx.fill();
const tex2 = createTextureFromCanvas(gl, ctx.canvas);
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
]), gl.STATIC_DRAW);
const positionLoc = gl.getAttribLocation(program, "position");
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
const tex1Loc = gl.getUniformLocation(program, "tex1");
const tex2Loc = gl.getUniformLocation(program, "tex2");
gl.useProgram(program);
gl.uniform1i(tex1Loc, 0);
gl.uniform1i(tex2Loc, 1);
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, tex1);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, tex2);
gl.drawArrays(gl.TRIANGLES, 0, 6);
function createTextureFromCanvas(gl, canvas) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
return tex;
}
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/2.x/twgl.min.js"></script>
<canvas></canvas>
For your purpose the blending part doesn't change; the only difference is where the textures come from. Above, the textures were created with a 2D canvas. Instead, you can use a framebuffer to render to a texture. AFTER you've rendered to a texture you can then use that texture in some other render, just like above.
To render to a texture first you create a framebuffer
var fb = gl.createFramebuffer();
Then you attach a texture to it
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0, // attach texture as COLOR_ATTACHMENT0
gl.TEXTURE_2D, // attach a 2D texture
someTexture, // the texture to attach
0); // the mip level to render to (must be 0 in WebGL1)
Depending on which attachments you use, you should check that they work:
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
// these attachments don't work
}
The WebGL spec lists 3 combinations of attachments that are guaranteed to work. The example below uses one of those 3, so there's no need to check.
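For reference, those three guaranteed combinations are: an RGBA/UNSIGNED_BYTE texture on COLOR_ATTACHMENT0 by itself, the same texture plus a DEPTH_COMPONENT16 renderbuffer on DEPTH_ATTACHMENT, and the same texture plus a DEPTH_STENCIL renderbuffer on DEPTH_STENCIL_ATTACHMENT. A sketch of the color + depth combination, in case you later need a depth buffer (width and height are whatever size you render at):
// with the framebuffer still bound and an RGBA/UNSIGNED_BYTE texture already on COLOR_ATTACHMENT0
var depthRb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthRb);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, width, height);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, depthRb);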
Now if you bind the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
Then when you call any gl.drawXXX function or gl.clear, it will draw to someTexture instead of the canvas. To start drawing to the canvas again, bind null:
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
Remember that if the canvas and the texture are different sizes, you'll need to call gl.viewport to render correctly.
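For example, with a 64x64 texture attached to fb and a larger canvas (the numbers just match the example below):
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, 64, 64);                            // match the texture size
// ... draw the texture's content ...
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height); // match the canvas size
// ... draw using the texture ...
Putting all of those pieces together: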
var vs = `
attribute vec4 position;
uniform mat4 matrix;
varying vec2 v_texcoord;
void main() {
gl_Position = matrix * position;
v_texcoord = position.xy * .5 + .5;
}
`;
var colorFS = `
precision mediump float;
uniform vec4 color;
void main() {
gl_FragColor = color;
}
`;
var mixFS = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D tex1;
uniform sampler2D tex2;
void main() {
// probably should use different texture coords for each
// texture for more flexibility but I'm lazy
vec4 color1 = texture2D(tex1, v_texcoord);
vec4 color2 = texture2D(tex2, v_texcoord);
gl_FragColor = mix(color1, color2, 0.5);
}
`;
const gl = document.querySelector("canvas").getContext("webgl");
const colorProgram = twgl.createProgramFromSources(gl, [vs, colorFS]);
const mixProgram = twgl.createProgramFromSources(gl, [vs, mixFS]);
// make 2 textures by attaching them to framebuffers and rendering to them
const texFbPair1 = createTextureAndFramebuffer(gl, 64, 64);
const texFbPair2 = createTextureAndFramebuffer(gl, 64, 64);
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
]), gl.STATIC_DRAW);
function setAttributes(buf, positionLoc) {
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
}
const colorPrgPositionLoc = gl.getAttribLocation(colorProgram, "position");
setAttributes(buf, colorPrgPositionLoc);
const colorLoc = gl.getUniformLocation(colorProgram, "color");
const colorProgMatrixLoc = gl.getUniformLocation(colorProgram, "matrix");
// draw red rect to first texture through the framebuffer it's attached to
gl.useProgram(colorProgram);
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair1.fb);
gl.viewport(0, 0, 64, 64);
gl.uniform4fv(colorLoc, [1, 0, 0, 1]);
gl.uniformMatrix4fv(colorProgMatrixLoc, false, [
0.5, 0, 0, 0,
0,.25, 0, 0,
0, 0, 1, 0,
.2,.3, 0, 1,
]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Draw a blue rect to the second texture through the framebuffer it's attached to
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair2.fb);
gl.viewport(0, 0, 64, 64);
gl.uniform4fv(colorLoc, [0, 0, 1, 1]);
gl.uniformMatrix4fv(colorProgMatrixLoc, false, [
0.25, 0, 0, 0,
0,.5, 0, 0,
0, 0, 1, 0,
.2,.3, 0, 1,
]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Draw both textures to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
const mixPrgPositionLoc = gl.getAttribLocation(mixProgram, "position");
setAttributes(buf, mixPrgPositionLoc);
const mixProgMatrixLoc = gl.getUniformLocation(mixProgram, "matrix");
const tex1Loc = gl.getUniformLocation(mixProgram, "tex1");
const tex2Loc = gl.getUniformLocation(mixProgram, "tex2");
gl.useProgram(mixProgram);
gl.uniform1i(tex1Loc, 0);
gl.uniform1i(tex2Loc, 1);
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, texFbPair1.tex);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, texFbPair2.tex);
gl.uniformMatrix4fv(mixProgMatrixLoc, false, [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
function createTextureAndFramebuffer(gl, width, height) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
return {tex: tex, fb: fb};
}
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/2.x/twgl.min.js"></script>
<canvas></canvas>
The only functional difference between the first example and the second is how the textures got their data. In the first example the textures got their data from a 2D canvas. In the second example the textures got their data by rendering to them with WebGL.
As for why your example doesn't blend textures: in order to blend two textures you need a shader that samples both of them.
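Concretely that means a fragment shader with two sampler2D uniforms, one per texture unit. Applied to your naming, a final blending pass would look roughly like the sketch below (uImage1 and uImage2 are made-up names; your shaders currently declare only a single uImage):
var blendFS = `
precision highp float;
varying highp vec2 vTextureCoord;
uniform sampler2D uImage1;  // set with gl.uniform1i(loc1, 0) -> texture unit 0
uniform sampler2D uImage2;  // set with gl.uniform1i(loc2, 1) -> texture unit 1
void main(void) {
vec4 red = texture2D(uImage1, vTextureCoord);
vec4 blue = texture2D(uImage2, vTextureCoord);
gl_FragColor = mix(red, blue, 0.5);  // or any other combining expression
}
`;
The combining step doesn't have to be mix(); any expression that uses both samples works. The important part is that one shader reads from both textures.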