How do you texture map a plane on WebGL? - javascript

I have created a basic scene consisting of a floor (plane), a table, and a cube. I now want to move on to texturing the floor with a wooden texture image (click to see the wooden texture). I know very little about texturing and am fairly new to WebGL.
I have some idea that I need to load the image from my directory, but I am not sure how to apply it to the floor. In addition, I am unsure about the texture coordinates needed for the plane (floor).
I am also aware that I will need to add new attributes to allow texturing. Would the overall layout of the object need to change for the texture to be applied?
I would appreciate any advice on where to start and how to go about this task.
<!DOCTYPE HTML>
<html lang="en">
<head>
<title>Drawing In 3D </title>
<meta charset="utf-8">
<script src="glMatrix.js"></script>
<script src="webgl-debug.js"></script>
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec4 aVertexColor;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying vec4 vColor;
void main() {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vColor = aVertexColor;
}
</script>
<script id="shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec4 vColor;
void main() {
gl_FragColor = vColor;
}
</script>
<script type="text/javascript">
var gl;
var canvas;
var shaderProgram;
var floorVertexPositionBuffer;
var floorVertexIndexBuffer;
var cubeVertexPositionBuffer;
var cubeVertexIndexBuffer;
var modelViewMatrix;
var projectionMatrix;
var modelViewMatrixStack;
function createGLContext(canvas) {
var names = ["webgl", "experimental-webgl"];
var context = null;
for (var i = 0; i < names.length; i++) {
try {
context = canvas.getContext(names[i]);
} catch (e) { }
if (context) {
break;
}
}
if (context) {
context.viewportWidth = canvas.width;
context.viewportHeight = canvas.height;
} else {
alert("Failed to create WebGL context!");
}
return context;
}
function loadShaderFromDOM(id) {
var shaderScript = document.getElementById(id);
if (!shaderScript) {
return null;
}
var shaderSource = "";
var currentChild = shaderScript.firstChild;
while (currentChild) {
if (currentChild.nodeType == 3) { // 3 corresponds to TEXT_NODE
shaderSource += currentChild.textContent;
}
currentChild = currentChild.nextSibling;
}
var shader;
if (shaderScript.type == "x-shader/x-fragment") {
shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (shaderScript.type == "x-shader/x-vertex") {
shader = gl.createShader(gl.VERTEX_SHADER);
} else {
return null;
}
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert(gl.getShaderInfoLog(shader));
return null;
}
return shader;
}
function setupShaders() {
var vertexShader = loadShaderFromDOM("shader-vs");
var fragmentShader = loadShaderFromDOM("shader-fs");
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert("Failed to setup shaders");
}
gl.useProgram(shaderProgram);
shaderProgram.vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
shaderProgram.vertexColorAttribute = gl.getAttribLocation(shaderProgram, "aVertexColor");
shaderProgram.uniformMVMatrix = gl.getUniformLocation(shaderProgram, "uMVMatrix");
shaderProgram.uniformProjMatrix = gl.getUniformLocation(shaderProgram, "uPMatrix");
// Initialise the matrices
modelViewMatrix = mat4.create();
projectionMatrix = mat4.create();
modelViewMatrixStack = [];
gl.enableVertexAttribArray(shaderProgram.vertexPositionAttribute);
}
function pushModelViewMatrix() {
var copyToPush = mat4.create(modelViewMatrix);
modelViewMatrixStack.push(copyToPush);
}
function popModelViewMatrix() {
if (modelViewMatrixStack.length == 0) {
throw "Error popModelViewMatrix() - Stack was empty ";
}
modelViewMatrix = modelViewMatrixStack.pop();
}
function setupFloorBuffers() {
floorVertexPositionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, floorVertexPositionBuffer);
var floorVertexPosition = [
// Plane in y=0
5.0, 0.0, 5.0, //v0
5.0, 0.0, -5.0, //v1
-5.0, 0.0, -5.0, //v2
-5.0, 0.0, 5.0]; //v3
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(floorVertexPosition), gl.STATIC_DRAW);
floorVertexPositionBuffer.itemSize = 3;
floorVertexPositionBuffer.numberOfItems = 4;
floorVertexIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, floorVertexIndexBuffer);
var floorVertexIndices = [0, 1, 2, 3];
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(floorVertexIndices), gl.STATIC_DRAW);
floorVertexIndexBuffer.itemSize = 1;
floorVertexIndexBuffer.numberOfItems = 4;
}
function setupCubeBuffers() {
cubeVertexPositionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexPositionBuffer);
var cubeVertexPosition = [
1.0, 1.0, 1.0, //v0
-1.0, 1.0, 1.0, //v1
-1.0, -1.0, 1.0, //v2
1.0, -1.0, 1.0, //v3
1.0, 1.0, -1.0, //v4
-1.0, 1.0, -1.0, //v5
-1.0, -1.0, -1.0, //v6
1.0, -1.0, -1.0, //v7
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVertexPosition), gl.STATIC_DRAW);
cubeVertexPositionBuffer.itemSize = 3;
cubeVertexPositionBuffer.numberOfItems = 8;
cubeVertexIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeVertexIndexBuffer);
var cubeVertexIndices = [
0, 1, 2, 0, 2, 3, // Front face
4, 6, 5, 4, 7, 6, // Back face
1, 5, 6, 1, 6, 2, //left
0, 3, 7, 0, 7, 4, //right
0, 5, 1, 0, 4, 5, //top
3, 2, 6, 3, 6, 7 //bottom
];
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeVertexIndices), gl.STATIC_DRAW);
cubeVertexIndexBuffer.itemSize = 1;
cubeVertexIndexBuffer.numberOfItems = 36;
}
function setupBuffers() {
setupFloorBuffers();
setupCubeBuffers();
}
function uploadModelViewMatrixToShader() {
gl.uniformMatrix4fv(shaderProgram.uniformMVMatrix, false, modelViewMatrix);
}
function uploadProjectionMatrixToShader() {
gl.uniformMatrix4fv(shaderProgram.uniformProjMatrix, false, projectionMatrix);
}
function drawFloor(r, g, b, a) {
// Disable vertex attrib array and use constant color for the floor.
gl.disableVertexAttribArray(shaderProgram.vertexColorAttribute);
// Set colour
gl.vertexAttrib4f(shaderProgram.vertexColorAttribute, r, g, b, a);
// Draw the floor
gl.bindBuffer(gl.ARRAY_BUFFER, floorVertexPositionBuffer);
gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, floorVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, floorVertexIndexBuffer);
gl.drawElements(gl.TRIANGLE_FAN, floorVertexIndexBuffer.numberOfItems, gl.UNSIGNED_SHORT, 0);
}
function drawCube(r, g, b, a) {
// Disable vertex attrib array and use constant color for the cube.
gl.disableVertexAttribArray(shaderProgram.vertexColorAttribute);
// Set color
gl.vertexAttrib4f(shaderProgram.vertexColorAttribute, r, g, b, a);
gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexPositionBuffer);
gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, cubeVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeVertexIndexBuffer);
gl.drawElements(gl.TRIANGLES, cubeVertexIndexBuffer.numberOfItems, gl.UNSIGNED_SHORT, 0);
}
function drawTable() {
// Draw table top
pushModelViewMatrix();
mat4.translate(modelViewMatrix, [0.0, 1.0, 0.0], modelViewMatrix);
mat4.scale(modelViewMatrix, [2.0, 0.1, 2.0], modelViewMatrix);
uploadModelViewMatrixToShader();
// Draw the scaled cube
drawCube(0.72, 0.53, 0.04, 1.0); // brown color
popModelViewMatrix();
// Draw table legs
for (var i = -1; i <= 1; i += 2) {
for (var j = -1; j <= 1; j += 2) {
pushModelViewMatrix();
mat4.translate(modelViewMatrix, [i * 1.9, -0.1, j * 1.9], modelViewMatrix);
mat4.scale(modelViewMatrix, [0.1, 1.0, 0.1], modelViewMatrix);
uploadModelViewMatrixToShader();
drawCube(0.72, 0.53, 0.04, 1.0); // argument sets brown color
popModelViewMatrix();
}
}
}
function draw() {
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
mat4.perspective(60, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, projectionMatrix);
mat4.identity(modelViewMatrix);
mat4.lookAt([8, 5, -10], [0, 0, 0], [0, 1, 0], modelViewMatrix);
uploadModelViewMatrixToShader();
uploadProjectionMatrixToShader();
// Draw floor in red color
drawFloor(1.0, 0.0, 0.0, 1.0);
// Draw table
pushModelViewMatrix();
mat4.translate(modelViewMatrix, [0.0, 1.1, 0.0], modelViewMatrix);
uploadModelViewMatrixToShader();
drawTable(); //Call drawTable() function
popModelViewMatrix();
// Draw box on top of the table
pushModelViewMatrix();
mat4.translate(modelViewMatrix, [0.0, 2.7, 0.0], modelViewMatrix);
mat4.scale(modelViewMatrix, [0.5, 0.5, 0.5], modelViewMatrix);
uploadModelViewMatrixToShader();
drawCube(0.0, 0.0, 1.0, 1.0);
popModelViewMatrix()
}
function startup() {
canvas = document.getElementById("myGLCanvas");
gl = WebGLDebugUtils.makeDebugContext(createGLContext(canvas));
setupShaders();
setupBuffers();
gl.clearColor(1.0, 1.0, 1.0, 1.0);
gl.enable(gl.DEPTH_TEST);
draw();
}
</script>
</head>
<body onload="startup();">
<canvas id="myGLCanvas" width="500" height="500"></canvas>
</body>
</html>

You can use a loadTexture() method to load textures in a WebGL project.
The loadTexture() routine starts by creating a WebGL texture object with createTexture(). It then uploads a single blue pixel using texImage2D(), which makes the texture immediately usable as a solid blue color even though the image may take a few moments to download.
To load the texture from the image file, it then creates an Image object and assigns its src to the URL of the image we wish to use as the texture. The function assigned to image.onload is called once the image has finished downloading. At that point we call texImage2D() again, this time using the image as the source of the texture. After that, filtering and wrapping are set up based on whether or not the downloaded image is a power of 2 in both dimensions.
function loadTexture(gl, url) {
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Because images have to be downloaded over the internet
// they might take a moment until they are ready.
// Until then put a single pixel in the texture so we can
// use it immediately. When the image has finished downloading
// we'll update the texture with the contents of the image.
const level = 0;
const internalFormat = gl.RGBA;
const width = 1;
const height = 1;
const border = 0;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([0, 0, 255, 255]); // opaque blue
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
width, height, border, srcFormat, srcType,
pixel);
const image = new Image();
image.onload = function() {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
srcFormat, srcType, image);
// WebGL1 has different requirements for power of 2 images
// vs non power of 2 images so check if the image is a
// power of 2 in both dimensions.
if (isPowerOf2(image.width) && isPowerOf2(image.height)) {
// Yes, it's a power of 2. Generate mips.
gl.generateMipmap(gl.TEXTURE_2D);
} else {
// No, it's not a power of 2. Turn off mips and set
// wrapping to clamp to edge
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
};
image.src = url;
return texture;
}
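The snippet above calls an isPowerOf2() helper that isn't shown there; a minimal version (a standard bit trick, assuming positive integer dimensions) looks like this:
function isPowerOf2(value) {
  // exactly one bit is set in a power of 2
  return (value & (value - 1)) === 0;
}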
Or have a look at this textured-cube WebGL tutorial on GitHub.
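To actually apply the loaded texture to your floor you also need texture coordinates, a sampler uniform, and a new attribute, as you suspected. Below is a minimal sketch against the code in your question; the attribute/uniform names (aTextureCoord, vTextureCoord, uSampler) and the image path ("wood.jpg") are only placeholders, and for simplicity it textures everything drawn with this shader rather than only the floor:
// additions to the vertex shader (shader-vs)
attribute vec2 aTextureCoord;
varying vec2 vTextureCoord;
// inside main():  vTextureCoord = aTextureCoord;
// additions to the fragment shader (shader-fs)
varying vec2 vTextureCoord;
uniform sampler2D uSampler;
// inside main():  gl_FragColor = texture2D(uSampler, vTextureCoord);
// near the other globals
var floorTexture;
var floorVertexTextureCoordinateBuffer;
// in setupFloorBuffers(): one (s, t) pair per floor vertex v0..v3
floorVertexTextureCoordinateBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, floorVertexTextureCoordinateBuffer);
var floorVertexTextureCoordinates = [
  1.0, 0.0, // v0
  1.0, 1.0, // v1
  0.0, 1.0, // v2
  0.0, 0.0  // v3
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(floorVertexTextureCoordinates), gl.STATIC_DRAW);
// in setupShaders(): look up and enable the new attribute and the sampler
shaderProgram.textureCoordAttribute = gl.getAttribLocation(shaderProgram, "aTextureCoord");
gl.enableVertexAttribArray(shaderProgram.textureCoordAttribute);
shaderProgram.uniformSampler = gl.getUniformLocation(shaderProgram, "uSampler");
// in startup(), after setupShaders(): load the wood image (path is an assumption)
floorTexture = loadTexture(gl, "wood.jpg");
// in drawFloor(), before gl.drawElements(): bind the texcoords and the texture
gl.bindBuffer(gl.ARRAY_BUFFER, floorVertexTextureCoordinateBuffer);
gl.vertexAttribPointer(shaderProgram.textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, floorTexture);
gl.uniform1i(shaderProgram.uniformSampler, 0);
The cube and table don't have texture coordinates, so in practice you would either disable the texture coordinate attribute (or give it a constant value with gl.vertexAttrib2f) when drawing them, or keep a separate untextured shader program for them.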

Related

WebGL Rotating cube overriding faces

I am creating a simple WebGL program that draws a cube rotating around the Z axis. The problem is that while it is rotating, some faces override the others and you don't see each individual side of the cube. I looked for a solution online, but it made things worse. They enable face culling, and for them it seems to work, but unfortunately it doesn't for me. Can anyone tell me what I need to add to my code in order to fix the face overriding?
Full Source Code:
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
var a = 24;
var positions = [
-0.5, -0.5, 0.0,
0.5, -0.5, 0.0,
0.5, 0.5, 0.0,
-0.5, 0.5, 0.0,
-0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, 0.5,
-0.5, 0.5, 0.5,
//0.5, 0.5, 1.0,
];
var indices = [
0, 1, 2, 0, 2, 3, // Front
0, 4, 7, 0, 3, 7, // Side 1
1, 5, 6, 1, 2, 6, // Side 2
4, 5, 6, 6, 7, 4 // Back
];
var rotation_angle = 1;
var radians = (rotation_angle * Math.PI)/180;
var rotation = [Math.sin(radians), 0.0, Math.cos(radians)];
var _buffer = gl.createBuffer();
var i_buffer = gl.createBuffer();
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, `
precision mediump float;
attribute vec3 position;
uniform vec3 rotation;
varying vec3 f_col;
void main(){
vec2 newPos = vec2(
position.x * rotation.x - position.z * rotation.z,
position.x * rotation.z + position.z * rotation.x
);
gl_Position = vec4(newPos.x, position.y, newPos.y, 1.0);
f_col = position;
}
`);
gl.compileShader(vertexShader);
// Check if it compiled
var success2 = gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS);
if (!success2) {
// Something went wrong during compilation; get the error
throw "could not compile shader:" + gl.getShaderInfoLog(vertexShader);
}
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, `
precision mediump float;
varying vec3 f_col;
void main() {
gl_FragColor = vec4(f_col.x, 0.2, f_col.z, 1.0);
}
`);
gl.compileShader(fragmentShader);
var success1 = gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS);
if (!success1) {
// Something went wrong during compilation; get the error
throw "could not compile shader:" + gl.getShaderInfoLog(fragmentShader);
}
var program = gl.createProgram();
// Attach pre-existing shaders
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
var attributeLoc = gl.getAttribLocation(program, "position");
gl.bindBuffer(gl.ARRAY_BUFFER, _buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions),
gl.STATIC_DRAW);
gl.vertexAttribPointer(attributeLoc, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(attributeLoc);
gl.useProgram(program);
var rotation_location = gl.getUniformLocation(program, "rotation");
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, i_buffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices),
gl.STATIC_DRAW);
function loop() {
gl.clearColor(0.75, 0.85, 0.8, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
//gl.drawArrays(gl.TRIANGLE_FAN, 0, 8);
gl.drawElements(gl.TRIANGLES, a, gl.UNSIGNED_SHORT, 0);
//gl.drawArrays(gl.TRIANGLE_STRIP, 1, 3);
rotation_angle++;
var radians = (rotation_angle * Math.PI)/180;
var rotation = [Math.sin(radians), 0.0, Math.cos(radians)];
gl.uniform3fv(rotation_location, rotation);
requestAnimationFrame(loop);
}
loop();
<canvas id="canvas" width="500" height="500"></canvas>

How to work with framebuffers in webgl?

I have been trying to understand framebuffers in WebGL/OpenGL ES.
I know that we can blend multiple textures using a framebuffer.
So, to understand that, I wrote a sample that takes a 1x1 texture and tries to apply framebuffer logic on top of it.
But it didn't work.
See the snippet at the bottom: if you click on "mix red and blue", the image doesn't get rendered. Am I doing anything wrong?
Code:
var canvas, gl, attrPosition, texture, program, vertexBuffer, textureBuffer, vertices, texVertices, attrPos, attrTexPos, textures = [], framebuffers = [];
canvas = document.getElementById('canvas');
gl = getWebGL();
vertices = new Float32Array([
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0,
-1.0, 1.0,
-1.0, -1.0,
]);
texVertices = new Float32Array([
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
0.0, 0.0
]);
var getProgram = function () {
var vs = createVertexShader([
'attribute vec2 attrPos;',
'attribute vec2 attrTexPos;',
'varying highp vec2 vTexCoord;',
'void main() {',
'\tgl_Position = vec4(attrPos, 0.0, 1.0);',
'}'
].join('\n'));
var fs = createFragmentShader([
'varying highp vec2 vTexCoord;',
'uniform sampler2D uImage;',
'void main() {',
'\tgl_FragColor = texture2D(uImage, vTexCoord);',
'}'
].join('\n'));
return createAndLinkPrograms(vs, fs);
};
var render = function () {
gl.clear(gl.DEPTH_BUFFER_BIT|gl.COLOR_BUFFER_BIT);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.vertexAttribPointer(attrPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, textureBuffer);
gl.vertexAttribPointer(attrTexPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);
};
if (gl) {
gl.clearColor(0.1, 0.5, 1.0, 1.0);
render();
program = getProgram();
texture = createAndSetupTexture();
vertexBuffer = createAndBindBuffer(vertices, gl.ARRAY_BUFFER);
attrPos = gl.getUniformLocation(program, 'attrPos');
gl.enableVertexAttribArray(attrPos);
textureBuffer = createAndBindBuffer(texVertices, gl.ARRAY_BUFFER);
attrTexPos = gl.getUniformLocation(program, 'attrTexPos');
gl.enableVertexAttribArray(attrTexPos);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([123, 0, 60, 255]));
render();
}
var initPingPongTextures = function(textures, framebuffers) {
for (var i = 0; i < 2; ++i) {
var tex = createAndSetupTexture(gl);
textures.push(tex);
// make the texture the same size as the image
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
// Create a framebuffer
var fbo = gl.createFramebuffer();
framebuffers.push(fbo);
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
// Attach a texture to it.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
}
}
var setFramebuffer = function(fbo, width, height) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
gl.viewport(0, 0, width, height);
};
var mixRedAndBlue = function () {
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
setFramebuffer(framebuffers[0], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([255, 0, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[0]);
setFramebuffer(framebuffers[1], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 255, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[1]);
setFramebuffer(null, 1, 1);
render();
};
var getWebGLContext = function(canvas) {
var webglContextParams = ['webgl', 'experimental-webgl', 'webkit-3d', 'moz-webgl'];
var webglContext = null;
for (var index = 0; index < webglContextParams.length; index++) {
try {
webglContext = canvas.getContext(webglContextParams[index]);
if(webglContext) {
//breaking as we got our context
break;
}
} catch (E) {
console.log(E);
}
}
if(webglContext === null) {
alert('WebGL is not supported on your browser.');
} else {
//WebGL is supported in your browser, lets render the texture
}
fillGLForCleanUp(webglContext);
return webglContext;
}
var createVertexShader = function (vertexShaderSource) {
console.log(vertexShaderSource);
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderSource);
gl.compileShader(vertexShader);
return vertexShader;
}
var createFragmentShader = function (fragmentShaderSource) {
console.log(fragmentShaderSource);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderSource);
gl.compileShader(fragmentShader);
return fragmentShader;
}
var createAndLinkPrograms = function (vertexShader, fragmentShader) {
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
alert('Could not initialise shaders');
}
gl.useProgram(program);
return program;
}
var createAndBindBuffer = function (verticesOrIndices, bufferType) {
var buffer = gl.createBuffer();
gl.bindBuffer(bufferType, buffer);
gl.bufferData(bufferType, verticesOrIndices, gl.STATIC_DRAW);
//clear memory
gl.bindBuffer(bufferType, null);
return buffer;
}
var allowAllImageSizes = function() {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// gl.bindTexture(gl.TEXTURE_2D, null);
}
var createAndSetupTexture = function() {
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
allowAllImageSizes();
gl.textures.push(texture);
return texture;
}
var getWebGL = function (canvas, width, height) {
if(!canvas) {
canvas = document.createElement('canvas');
canvas.id = 'canvas';
canvas.width = !width ? 512 : width;
canvas.height = !height ? 512 : height;
document.body.appendChild(canvas);
} else {
canvas.width = !width ? 512 : width;
canvas.height = !height ? 512 : height;
}
return getWebGLContext(canvas);
}
var fillGLForCleanUp = function (gl) {
gl.textures = [];
gl.framebuffers = [];
gl.array_buffer = [];
gl.element_array_buffers = [];
}
var canvas, gl, attrPosition, texture, program,
vertexBuffer, textureBuffer, vertices, texVertices,
attrPos, attrTexPos, textures = [], framebuffers = [];
canvas = document.getElementById('canvas');
gl = getWebGL(canvas);
vertices = new Float32Array([
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0,
-1.0, 1.0,
-1.0, -1.0,
]);
texVertices = new Float32Array([
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
0.0, 0.0
]);
var getProgram = function () {
var vs = createVertexShader([
'attribute vec2 attrPos;',
'attribute vec2 attrTexPos;',
'varying highp vec2 vTexCoord;',
'void main() {',
'\tgl_Position = vec4(attrPos, 0.0, 1.0);',
'}'
].join('\n'));
var fs = createFragmentShader([
'varying highp vec2 vTexCoord;',
'uniform sampler2D uImage;',
'void main() {',
'\tgl_FragColor = texture2D(uImage, vTexCoord);',
'}'
].join('\n'));
return createAndLinkPrograms(vs, fs);
};
var render = function () {
gl.clear(gl.DEPTH_BUFFER_BIT|gl.COLOR_BUFFER_BIT);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.vertexAttribPointer(attrPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, textureBuffer);
gl.vertexAttribPointer(attrTexPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);
};
if (gl) {
gl.clearColor(0.1, 0.5, 1.0, 1.0);
render();
program = getProgram();
texture = createAndSetupTexture();
vertexBuffer = createAndBindBuffer(vertices, gl.ARRAY_BUFFER);
attrPos = gl.getUniformLocation(program, 'attrPos');
gl.enableVertexAttribArray(attrPos);
textureBuffer = createAndBindBuffer(texVertices, gl.ARRAY_BUFFER);
attrTexPos = gl.getUniformLocation(program, 'attrTexPos');
gl.enableVertexAttribArray(attrTexPos);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([123, 0, 60, 255]));
render();
}
var initPingPongTextures = function(textures, framebuffers) {
for (var i = 0; i < 2; ++i) {
var tex = createAndSetupTexture(gl);
textures.push(tex);
// make the texture the same size as the image
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
// Create a framebuffer
var fbo = gl.createFramebuffer();
framebuffers.push(fbo);
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
// Attach a texture to it.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
}
}
var setFramebuffer = function(fbo, width, height) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
gl.viewport(0, 0, width, height);
};
var mixRedAndBlue = function () {
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
setFramebuffer(framebuffers[0], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([255, 0, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[0]);
setFramebuffer(framebuffers[1], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 255, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[1]);
setFramebuffer(null, 1, 1);
render();
};
<button id="redImg" onclick="mixRedAndBlue()">Mix Red and blue</button><hr/>
<canvas id="canvas" width=512 height=512></canvas>
Edit 1:
I am trying to achieve the same thing with multiple programs and multiple fragment shaders, because having if/else statements within a fragment shader is not recommended since the shader runs for every pixel.
Shaders.prototype.VS_Base = [
'attribute vec3 verticesPosition;',
'attribute vec2 texturePosition;',
'varying highp vec2 vTextureCoord;',
'void main(void) {',
'\tgl_Position = vec4(verticesPosition * vec3(1.0, -1.0, 1.0), 0.5);',
'\tvTextureCoord = texturePosition;',
'}'
].join('\n');
Shaders.prototype.FS_Base_Image_RED = [
'#ifdef GL_ES',
'precision highp float;',
'#endif',
'uniform sampler2D uImage;',
'varying highp vec2 vTextureCoord;',
'void main (void) {',
'\tgl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);//texture2D(uImage, vTextureCoord);',
'}'
].join('\n');
Shaders.prototype.FS_Base_Image_BLUE = [
'#ifdef GL_ES',
'precision highp float;',
'#endif',
'uniform sampler2D uImage;',
'varying highp vec2 vTextureCoord;',
'void main (void) {',
'\tgl_FragColor = vec4(0.0, 0.0, 1.0, 1.0);//texture2D(uImage, vTextureCoord);',
'}'
].join('\n');
Now I have two separate programs, one for each fragment shader, and I need to use framebuffers to mix red and blue. I am not looking for mix(), as the actual scenario is very complex, and that's the reason I am using multiple programs with separate fragment shaders to avoid conditional if/else statements.
It's not clear what you're trying to do. Framebuffers are just a list of attachments (textures and renderbuffers). You use them to render to a texture and/or renderbuffer. Then you can use the texture you just rendered to as input to some other render.
Here's an example with NO framebuffers. It blends 2 textures.
var vs = `
attribute vec4 position;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = position.xy * .5 + .5;
}
`;
var fs = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D tex1;
uniform sampler2D tex2;
void main() {
vec4 color1 = texture2D(tex1, v_texcoord);
vec4 color2 = texture2D(tex2, v_texcoord);
gl_FragColor = mix(color1, color2, 0.5);
}
`;
const gl = document.querySelector("canvas").getContext("webgl");
const program = twgl.createProgramFromSources(gl, [vs, fs]);
// make 2 textures with canvas 2d
const ctx = document.createElement("canvas").getContext("2d");
ctx.canvas.width = 64;
ctx.canvas.height = 64;
// first texture has a circle
ctx.fillStyle = "blue";
ctx.fillRect(0, 0, 64, 64);
ctx.strokeStyle = "yellow";
ctx.beginPath();
ctx.arc(32, 32, 20, 0, Math.PI * 2, false);
ctx.lineWidth = 12;
ctx.stroke();
const tex1 = createTextureFromCanvas(gl, ctx.canvas);
// second texture has a diamond (diagonal square)
ctx.fillStyle = "red";
ctx.fillRect(0, 0, 64, 64);
ctx.fillStyle = "cyan";
ctx.beginPath();
ctx.moveTo(32, 6);
ctx.lineTo(58, 32);
ctx.lineTo(32, 58);
ctx.lineTo(6, 32);
ctx.lineTo(32, 6);
ctx.fill();
const tex2 = createTextureFromCanvas(gl, ctx.canvas);
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
]), gl.STATIC_DRAW);
const positionLoc = gl.getAttribLocation(program, "position");
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
const tex1Loc = gl.getUniformLocation(program, "tex1");
const tex2Loc = gl.getUniformLocation(program, "tex2");
gl.useProgram(program);
gl.uniform1i(tex1Loc, 0);
gl.uniform1i(tex2Loc, 1);
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, tex1);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, tex2);
gl.drawArrays(gl.TRIANGLES, 0, 6);
function createTextureFromCanvas(gl, canvas) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, ctx.canvas);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
return tex;
}
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/2.x/twgl.min.js"></script>
<canvas></canvas>
For your purpose there is no difference in the blending part; the only difference is where the textures come from. Above, the textures were created using a 2D canvas. Instead, you can use a framebuffer to render to a texture. AFTER you've rendered to a texture, you can use that texture in some other render just like above.
To render to a texture first you create a framebuffer
var fb = gl.createFramebuffer();
Then you attach a texture to it
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0, // attach texture as COLOR_ATTACHMENT0
gl.TEXTURE_2D, // attach a 2D texture
someTexture, // the texture to attach
0); // the mip level to render to (must be 0 in WebGL1)
Depending on your attachments you should check if they work.
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
// these attachments don't work
}
The WebGL spec lists 3 combinations of attachments that are guaranteed to work. The example below is using one of those 3 so there's no need to check
Now if you bind the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
Then when you call any gl.drawXXX function or gl.clear, it will draw to someTexture instead of the canvas. To start drawing to the canvas again, bind null:
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
Remember that if the canvas and the texture are different sizes, you'll need to call gl.viewport to render correctly.
var vs = `
attribute vec4 position;
uniform mat4 matrix;
varying vec2 v_texcoord;
void main() {
gl_Position = matrix * position;
v_texcoord = position.xy * .5 + .5;
}
`;
var colorFS = `
precision mediump float;
uniform vec4 color;
void main() {
gl_FragColor = color;
}
`;
var mixFS = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D tex1;
uniform sampler2D tex2;
void main() {
// probably should use different texture coords for each
// texture for more flexibility but I'm lazy
vec4 color1 = texture2D(tex1, v_texcoord);
vec4 color2 = texture2D(tex2, v_texcoord);
gl_FragColor = mix(color1, color2, 0.5);
}
`;
const gl = document.querySelector("canvas").getContext("webgl");
const colorProgram = twgl.createProgramFromSources(gl, [vs, colorFS]);
const mixProgram = twgl.createProgramFromSources(gl, [vs, mixFS]);
// make 2 textures by attaching them to framebuffers and rendering to them
const texFbPair1 = createTextureAndFramebuffer(gl, 64, 64);
const texFbPair2 = createTextureAndFramebuffer(gl, 64, 64);
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
]), gl.STATIC_DRAW);
function setAttributes(buf, positionLoc) {
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
}
const colorPrgPositionLoc = gl.getAttribLocation(colorProgram, "position");
setAttributes(buf, colorPrgPositionLoc);
const colorLoc = gl.getUniformLocation(colorProgram, "color");
const colorProgMatrixLoc = gl.getUniformLocation(colorProgram, "matrix");
// draw red rect to first texture through the framebuffer it's attached to
gl.useProgram(colorProgram);
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair1.fb);
gl.viewport(0, 0, 64, 64);
gl.uniform4fv(colorLoc, [1, 0, 0, 1]);
gl.uniformMatrix4fv(colorProgMatrixLoc, false, [
0.5, 0, 0, 0,
0,.25, 0, 0,
0, 0, 1, 0,
.2,.3, 0, 1,
]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Draw a blue rect to the second texture through the framebuffer it's attached to
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair2.fb);
gl.viewport(0, 0, 64, 64);
gl.uniform4fv(colorLoc, [0, 0, 1, 1]);
gl.uniformMatrix4fv(colorProgMatrixLoc, false, [
0.25, 0, 0, 0,
0,.5, 0, 0,
0, 0, 1, 0,
.2,.3, 0, 1,
]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Draw both textures to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
const mixPrgPositionLoc = gl.getAttribLocation(mixProgram, "position");
setAttributes(buf, mixPrgPositionLoc);
const mixProgMatrixLoc = gl.getUniformLocation(mixProgram, "matrix");
const tex1Loc = gl.getUniformLocation(mixProgram, "tex1");
const tex2Loc = gl.getUniformLocation(mixProgram, "tex2");
gl.useProgram(mixProgram);
gl.uniform1i(tex1Loc, 0);
gl.uniform1i(tex2Loc, 1);
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, texFbPair1.tex);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, texFbPair2.tex);
gl.uniformMatrix4fv(mixProgMatrixLoc, false, [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
function createTextureAndFramebuffer(gl, width, height) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
return {tex: tex, fb: fb};
}
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/2.x/twgl.min.js"></script>
<canvas></canvas>
The only functional difference between the first program and the second is how the textures got their data. In the first example the textures got their data from a canvas 2d. In the 2nd example the textures got their data by rendering to them using WebGL.
As for why your example doesn't blend textures, in order to blend 2 textures you need a shader that uses two textures.

Texturing a pyramid in WebGL

I am working on a textured pyramid, but I am getting some warnings. I followed the book "Beginning WebGL for HTML5" step by step, and it does not work.
Here is my code:
<!doctype html>
<html>
<head>
<title>Project 2</title>
<script type="text/javascript" src="gl-matrix-min.js"></script>
<style>
body{ background-color: grey; }
canvas{ background-color: white; }
</style>
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec4 aVertexPosition;
attribute vec4 aVertexColor;
attribute vec2 aVertexTextureCoord;
varying highp vec2 vTextureCoord;
varying vec4 vColor;
/*
Computing on the GPU
// Model matrix
uniform mat4 uMVMatrix;
// Projection matrix
uniform mat4 uPMatrix;
// View matrix
uniform mat4 uVMatrix;
*/
uniform mat4 uPVMatrix;
void main(void) {
vColor = aVertexColor;
gl_Position = uPVMatrix * aVertexPosition;
vTextureCoord = aVertexTextureCoord;
// gl_Position = uPMatrix * uVMatrix * uMVMatrix * aVertexPosition;
}
</script>
<script id="shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec4 vColor;
varying highp vec2 vTextureCoord;
uniform sampler2D uSampler;
void main(void) {
gl_FragColor = texture2D(uSampler, vTextureCoord);
}
</script>
<script>
var gl = null,
canvas = null,
glProgram = null,
fragmentShader = null,
vertexShader = null;
var coordinateArray = [],
triangleVerticeColors = [],
verticesArray = [],
verticesIndexArray = [],
triangleTexCoords = [];
var vertexPositionAttribute = null,
trianglesVerticeBuffer = null,
vertexColorAttribute = null,
trianglesColorBuffer = null,
triangleVerticesIndexBuffer = null,
vertexTexCoordAttribute = null,
trianglesTexCoordBuffer = null;
var P = mat4.create(),
V = mat4.create(),
M = mat4.create(),
VM = mat4.create(),
PVM = mat4.create();
var uPVMMatrix;
var texture;
var textureImage = null;
function initWebGL() {
canvas = document.getElementById("my-canvas");
try {
gl = canvas.getContext("webgl") ||
canvas.getContext("experimental-webgl");
}catch(e){ }
if(gl) {
setupWebGL();
initShaders();
setupTexture();
setupBuffers();
//getMatrixUniforms();
//setMatrixUniforms();
//animationLoop();
drawScene();
}else{
alert( "Error: Your browser does not appear to" + "support WebGL.");
}
}
function animationLoop() {
var R = mat4.create();
var angle = 0;
var i =0;
var loop = function() {
angle = performance.now() / 1000 / 6 * 2 * Math.PI;
i++;
mat4.rotate(PVM, R, angle, [0, 1, 0]);
gl.uniformMatrix4fv(uPVMMatrix, false, PVM);
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clear(gl.DEPTH_BUFFER_BIT | gl.COLOR_BUFFER_BIT);
drawScene();
requestAnimationFrame(loop);
};
requestAnimationFrame(loop);
}
function setupWebGL() {
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
gl.frontFace(gl.CW);
gl.cullFace(gl.BACK);
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
console.log(P);
console.log(V);
console.log(M);
// mat4.identity(M);
mat4.lookAt(V, [5, 0, -5], [0, 0, 0], [0, 1, 0]);
mat4.perspective(P, glMatrix.toRadian(45), canvas.width / canvas.height, 0.1, 1000.0);
mat4.multiply(VM,V,M);
mat4.multiply(PVM,P,VM);
}
function initShaders() {
var fs_source = document.getElementById('shader-fs').innerHTML,
vs_source = document.getElementById('shader-vs').innerHTML;
vertexShader = makeShader(vs_source, gl.VERTEX_SHADER);
fragmentShader = makeShader(fs_source, gl.FRAGMENT_SHADER);
glProgram = gl.createProgram();
gl.attachShader(glProgram, vertexShader);
gl.attachShader(glProgram, fragmentShader);
gl.linkProgram(glProgram);
if (!gl.getProgramParameter(glProgram, gl.LINK_STATUS)) {
alert("Unable to initialize the shader program.");
}
gl.useProgram(glProgram);
uPVMMatrix = gl.getUniformLocation(glProgram, "uPVMatrix");
gl.uniformMatrix4fv(uPVMMatrix, false, PVM);
}
function loadTexture() {
textureImage = $("#troll").get(0);
setupTexture();
}
function setupTexture() {
texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNISGNED_BYTE, textureImage);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
glProgram.sampleUniform = gl.getUniformLocation(glProgram, "uSampler");
gl.uniform1i(glProgram.sampleUniform, 0);
if(!gl.isTexture(texture)) {
console.log("Error : Texture is invalid");
}
}
function makeShader(src, type) {
var shader = gl.createShader(type);
gl.shaderSource(shader, src);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert("Error compiling shader: " + gl.getShaderInfoLog(shader));
}
return shader;
}
function setupBuffers() {
// n-sides polygon
var n = 6;
var radius = 1;
var angle = (Math.PI * 2) / n;
var xCoordinate = 0;
var yCoordinate = 0;
for(var i = 0 ; i < n ; i++) {
var a = angle * i;
var xNewCoordinate = xCoordinate + radius * Math.cos(a);
var yNewCoordinate = yCoordinate + radius * Math.sin(a);
var zNewCoordinate = 0;
coordinateArray.push(xNewCoordinate);
coordinateArray.push(yNewCoordinate);
coordinateArray.push(zNewCoordinate);
}
verticesArray = [
//Bottom Face
0.0, 0.0, 0.0,
0.0, 0.0, -1.0,
1.0, 0.0, -1.0,
0.0, 0.0, 0.0,
1.0, 0.0, -1.0,
1.0, 0.0, 0.0,
//Front Face
0.0, 0.0, 0.0,
1.0, 0.0, 0.0,
0.5, 1.0, -0.5,
//Right Face
1.0, 0.0, 0.0,
1.0, 0.0, -1.0,
0.5, 1.0, -0.5,
//Back Face
1.0, 0.0, -1.0,
0.0, 0.0, -1.0,
0.5, 1.0, -0.5,
//Left Face
0.0, 0.0, -1.0,
0.0, 0.0, 0.0,
0.5, 1.0, -0.5,
];
trianglesVerticeBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesVerticeBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verticesArray), gl.STATIC_DRAW);
verticesIndexArray = [
0, 1, 2,
3, 4, 5,
6, 7, 8,
9, 10, 11,
12, 13, 14,
15, 16, 17,
];
triangleVerticesIndexBuffer = gl.createBuffer();
triangleVerticesIndexBuffer.number_vertext_points = verticesIndexArray.length;
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, triangleVerticesIndexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(verticesIndexArray), gl.STATIC_DRAW);
triangleTexCoords = [
0.5000, 0.1910,
0.1910, 0.5000,
0.5000, 0.8090,
0.5000, 0.1910,
0.5000, 0.8090,
0.8090, 0.5000,
0.5000, 0.1910,
0.8090, 0.5000,
1.0000, 0.0000,
0.8090, 0.5000,
0.5000, 0.8090,
1.0000, 1.0000,
0.5000, 0.8090,
0.1910, 0.5000,
0.0000, 1.0000,
0.1910, 0.5000,
0.5000, 0.1910,
0.0000, 0.0000,
];
trianglesTexCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesTexCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(triangleTexCoords), gl.STATIC_DRAW);
triangleVerticeColors = [
// Bottom quad
0.470, 0.796, 0.886,
0.470, 0.796, 0.886,
0.470, 0.796, 0.886,
0.470, 0.796, 0.886,
0.470, 0.796, 0.886,
0.470, 0.796, 0.886,
// Back triangle
0.772, 0.470, 0.886,
0.772, 0.470, 0.886,
0.772, 0.470, 0.886,
// Left triangle
0.886, 0.552, 0.470,
0.886, 0.552, 0.470,
0.886, 0.552, 0.470,
// Front triangle
0.886, 0.882, 0.470,
0.886, 0.882, 0.470,
0.886, 0.882, 0.470,
// Right triangle
0.470, 0.886, 0.505,
0.470, 0.886, 0.505,
0.470, 0.886, 0.505,
];
trianglesColorBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesColorBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(triangleVerticeColors), gl.STATIC_DRAW);
}
// GPU
function getMatrixUniforms() {
glProgram.mvMatrixUniform = gl.getUniformLocation(glProgram, "uMVMatrix");
glProgram.pMatrixUniform = gl.getUniformLocation(glProgram, "uPMatrix");
glProgram.vMatrixUniform = gl.getUniformLocation(glProgram, "uVMatrix");
}
// GPU
function setMatrixUniforms() {
gl.uniformMatrix4fv(glProgram.mvMatrixUniform, false, M);
gl.uniformMatrix4fv(glProgram.pMatrixUniform, false, P);
gl.uniformMatrix4fv(glProgram.vMatrixUniform, false, V);
}
function drawScene() {
vertexPositionAttribute = gl.getAttribLocation(glProgram, "aVertexPosition");
gl.enableVertexAttribArray(vertexPositionAttribute);
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesVerticeBuffer);
gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
vertexTexCoordAttribute = gl.getAttribLocation(glProgram, "aVertexTexCoord");
gl.enableVertexAttribArray(vertexTexCoordAttribute);
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesTexCoordBuffer);
gl.vertexAttribPointer(vertexTexCoordAttribute, 2, gl.FLOAT, false, 0, 0);
/*vertexColorAttribute = gl.getAttribLocation(glProgram, "aVertexColor");
gl.enableVertexAttribArray(vertexColorAttribute);
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesColorBuffer);
gl.vertexAttribPointer(vertexColorAttribute, 3, gl.FLOAT, false, 0, 0);
*/
gl.drawElements(gl.TRIANGLE_STRIP, triangleVerticesIndexBuffer.number_vertext_points, gl.UNSIGNED_SHORT, 0);
}
</script>
</head>
<body onload="initWebGL()">
<canvas id="my-canvas" width="800" height="600">
Your browser does not support the HTML5 canvas element.
</canvas>
<img src="./trollface.png" id="troll" />
</body>
</html>
The texture coordinates I used and the texture itself are shown in the images (not included here).
The warnings are about out-of-range arrays and about the image not loading.
You have multiple errors in your code.
Your function loadTexture() is never called and so the texture is actually never loaded.
Replace setupTexture() with loadTexture() inside initWebGL().
You also seem to be using jQuery to retrieve the image from the DOM, but you didn't load the library.
Replace $("#troll").get(0) with document.getElementById("troll") inside loadTexture().
The dimensions of the texture need to be a power of 2 (128x32, 256x256, 512x1024, ...).
You should resize your image to 256x256.
You made a typo with one of the parameters for texImage2D().
Replace gl.UNISGNED_BYTE with gl.UNSIGNED_BYTE inside your gl.texImage2D() call.
Your names for the texture coordinate attribute don't match between your gl.getAttribLocation() call and your vertex shader code.
Replace aVertexTexCoord with aVertexTextureCoord inside your gl.getAttribLocation() call.
If you are rendering individual triangles replace gl.TRIANGLE_STRIP with gl.TRIANGLES inside your gl.drawElements() call.
If you fix all those mistakes your code will run without errors.
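Putting those fixes together, the affected lines would look roughly like this (a sketch of the corrections listed above, not a full rewrite):
// in initWebGL(): load the image first, then set up the texture
loadTexture(); // instead of calling setupTexture() directly
// in loadTexture(): plain DOM lookup, no jQuery needed
textureImage = document.getElementById("troll");
// in setupTexture(): fix the typo in the type parameter
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, textureImage);
// in drawScene(): the attribute name must match the vertex shader
vertexTexCoordAttribute = gl.getAttribLocation(glProgram, "aVertexTextureCoord");
// and draw the index list as individual triangles
gl.drawElements(gl.TRIANGLES, triangleVerticesIndexBuffer.number_vertext_points, gl.UNSIGNED_SHORT, 0);
The trollface.png image itself still needs to be resized to a power-of-2 size such as 256x256.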

Perspective projection showing nothing

I'm trying to follow the WebGLFundamentals.org and LearningWebGL tutorials, and I have reached the projection part.
I created my scene similar to LearningWebGL Tutorial 01 (with only the square):
var canvas;
var gl;
var shaderProgram;
// Vertex Shader
var positionLocation;
var uvMatrixLocation;
var pMatrixLocation;
var uvMatrix = mat4.create();
var pMatrix = mat4.create();
// Fragment Shader
var colorLocation;
var buffer = [];
function initGL() {
canvas = document.getElementById("webgl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
gl.viewportWidth = canvas.width;
gl.viewportHeight = canvas.height;
}
function createShader(gl, id, type) {
var shader;
var shaderSrc = document.getElementById(id);
if (type == "fragment") {
shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (type == "vertex") {
shader = gl.createShader(gl.VERTEX_SHADER);
} else {
return null;
}
gl.shaderSource(shader, shaderSrc.text);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert(gl.getShaderInfoLog(shader));
return null;
}
return shader;
}
function initShaders() {
var fragmentShader = createShader(gl, "fshader", "fragment");
var vertexShader = createShader(gl, "vshader", "vertex");
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
// Link the shader parameters
positionLocation = gl.getAttribLocation(shaderProgram, "a_position");
uvMatrixLocation = gl.getUniformLocation(shaderProgram, "uvMatrix");
pMatrixLocation = gl.getUniformLocation(shaderProgram, "pMatrix");
gl.enableVertexAttribArray(positionLocation);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) { alert("Could not initialize the shaders"); }
}
function initBuffers() {
createPoly([
1.0, 1.0, 0.0,
-1.0, 1.0, 0.0,
1.0, -1.0, 0.0,
-1.0, -1.0, 0.0
]);
}
function draw() {
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(shaderProgram);
mat4.perspective(pMatrix, Math.PI/3, 1, -10, 10);
gl.uniformMatrix4fv(pMatrixLocation, false, pMatrix);
gl.uniformMatrix4fv(uvMatrixLocation, false, uvMatrix);
buffer.forEach(function(e) {
gl.bindBuffer(gl.ARRAY_BUFFER, e.buffer);
gl.vertexAttribPointer(positionLocation, e.vertSize, gl.FLOAT, false, 0, 0);
gl.drawArrays(e.primtype, 0, e.nVerts());
});
}
window.onload = function() {
initGL();
initShaders();
initBuffers();
draw();
}
// ---------------------------------------------------------------
// --------------------------- Utils -----------------------------
// ---------------------------------------------------------------
function createPoly(vertices) {
var vertexBuffer;
vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
var poly = {
buffer: vertexBuffer,
vertSize: 3,
nVerts: function() { return vertices.length/this.vertSize; },
primtype: gl.TRIANGLE_STRIP
};
buffer.push(poly);
}
<script src="https://www.khronos.org/registry/webgl/sdk/demos/common/webgl-utils.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.3.2/gl-matrix-min.js"></script>
<script id="vshader" type="x-shader/x-vertex">
attribute vec3 a_position;
uniform mat4 uvMatrix;
uniform mat4 pMatrix;
varying vec4 v_color;
void main() {
gl_Position = pMatrix * uvMatrix * vec4(a_position, 1);
v_color = gl_Position;
}
</script>
<script id="fshader" type="x-shader/x-fragment">
precision mediump float;
varying vec4 v_color;
void main(void) { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); }
</script>
<canvas id="webgl-canvas" width="800" height="600"></canvas>
Then I set the projection on line 85:
Using an orthographic projection, mat4.ortho(pMatrix, -5, 5, -5, 5, -5, 5);, the square appears on my canvas.
When I use perspective, mat4.perspective(pMatrix, Math.PI/3, 1, -10, 10);, it doesn't work.
I've already tried several parameters.
First off, normally you'd make zNear and zFar positive numbers; they represent the range in front of the camera that will be visible. Second, because your uvMatrix is the identity matrix, your object is at the origin. The view is also at the origin (see cameras and perspective).
That means that in order to view the object you either need to move the object away from the camera or add in a view matrix (which also effectively moves the object away from the origin).
I changed the code to this and it worked:
// set zNear to 0.1
mat4.perspective(pMatrix, Math.PI/3, 1, 0.1, 10);
// move the object out from the camera
mat4.translate(uvMatrix, uvMatrix, [0, 0, -5]);
var canvas;
var gl;
var shaderProgram;
// Vertex Shader
var positionLocation;
var uvMatrixLocation;
var pMatrixLocation;
var uvMatrix = mat4.create();
var pMatrix = mat4.create();
// Fragment Shader
var colorLocation;
var buffer = [];
function initGL() {
canvas = document.getElementById("webgl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
gl.viewportWidth = canvas.width;
gl.viewportHeight = canvas.height;
}
function createShader(gl, id, type) {
var shader;
var shaderSrc = document.getElementById(id);
if (type == "fragment") {
shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (type == "vertex") {
shader = gl.createShader(gl.VERTEX_SHADER);
} else {
return null;
}
gl.shaderSource(shader, shaderSrc.text);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert(gl.getShaderInfoLog(shader));
return null;
}
return shader;
}
function initShaders() {
var fragmentShader = createShader(gl, "fshader", "fragment");
var vertexShader = createShader(gl, "vshader", "vertex");
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
// Link the shader parameters
positionLocation = gl.getAttribLocation(shaderProgram, "a_position");
uvMatrixLocation = gl.getUniformLocation(shaderProgram, "uvMatrix");
pMatrixLocation = gl.getUniformLocation(shaderProgram, "pMatrix");
gl.enableVertexAttribArray(positionLocation);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) { alert("Could not initialize the shaders"); }
}
function initBuffers() {
createPoly([
1.0, 1.0, 0.0,
-1.0, 1.0, 0.0,
1.0, -1.0, 0.0,
-1.0, -1.0, 0.0
]);
}
function draw() {
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(shaderProgram);
mat4.perspective(pMatrix, Math.PI/3, 1, 0.1, 10);
mat4.translate(uvMatrix, uvMatrix, [0, 0, -5]);
gl.uniformMatrix4fv(pMatrixLocation, false, pMatrix);
gl.uniformMatrix4fv(uvMatrixLocation, false, uvMatrix);
buffer.forEach(function(e) {
gl.bindBuffer(gl.ARRAY_BUFFER, e.buffer);
gl.vertexAttribPointer(positionLocation, e.vertSize, gl.FLOAT, false, 0, 0);
gl.drawArrays(e.primtype, 0, e.nVerts());
});
}
window.onload = function() {
initGL();
initShaders();
initBuffers();
draw();
}
// ---------------------------------------------------------------
// --------------------------- Utils -----------------------------
// ---------------------------------------------------------------
function createPoly(vertices) {
var vertexBuffer;
vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
var poly = {
buffer: vertexBuffer,
vertSize: 3,
nVerts: function() { return vertices.length/this.vertSize; },
primtype: gl.TRIANGLE_STRIP
};
buffer.push(poly);
}
<script src="https://www.khronos.org/registry/webgl/sdk/demos/common/webgl-utils.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.3.2/gl-matrix-min.js"></script>
<script id="vshader" type="x-shader/x-vertex">
attribute vec3 a_position;
uniform mat4 uvMatrix;
uniform mat4 pMatrix;
varying vec4 v_color;
void main() {
gl_Position = pMatrix * uvMatrix * vec4(a_position, 1);
v_color = gl_Position;
}
</script>
<script id="fshader" type="x-shader/x-fragment">
precision mediump float;
varying vec4 v_color;
void main(void) { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); }
</script>
<canvas id="webgl-canvas" width="800" height="600"></canvas>
Change zNear to a small, positive number.

Texture not mapping to square correctly WebGL

For some reason, when I map the texture to the boxes I have drawn on the 3D canvas, it is not showing correctly; all I get is a blue box and not the full texture.
<script id="shader-fs" type="x-shader/x-fragment" type="text/javascript">
precision mediump float;
varying vec2 vTextureCoord;
uniform sampler2D uSampler;
void main(void) {
gl_FragColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
}
</script>
<script id="shader-vs" type="x-shader/x-vertex" type="text/javascript">
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
</script>
<script type="text/javascript">
var gl;
var neheTexture;
function initGL(canvas) {
try {
gl = canvas.getContext("experimental-webgl");
gl.viewportWidth = canvas.width;
gl.viewportHeight = canvas.height;
} catch (e) {
}
if (!gl) {
alert("Could not initialise WebGL, sorry :-(");
}
}
function getShader(gl, id) {
var shaderScript = document.getElementById(id);
if (!shaderScript) {
return null;
}
var str = "";
var k = shaderScript.firstChild;
while (k) {
if (k.nodeType == 3) {
str += k.textContent;
}
k = k.nextSibling;
}
var shader;
if (shaderScript.type == "x-shader/x-fragment") {
shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (shaderScript.type == "x-shader/x-vertex") {
shader = gl.createShader(gl.VERTEX_SHADER);
} else {
return null;
}
gl.shaderSource(shader, str);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert(gl.getShaderInfoLog(shader));
return null;
}
return shader;
}
var shaderProgram;
function initShaders() {
var fragmentShader = getShader(gl, "shader-fs");
var vertexShader = getShader(gl, "shader-vs");
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert("Could not initialise shaders");
}
gl.useProgram(shaderProgram);
shaderProgram.vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
gl.enableVertexAttribArray(shaderProgram.vertexPositionAttribute);
shaderProgram.pMatrixUniform = gl.getUniformLocation(shaderProgram, "uPMatrix");
shaderProgram.mvMatrixUniform = gl.getUniformLocation(shaderProgram, "uMVMatrix");
}
var mvMatrix = mat4.create();
var pMatrix = mat4.create();
function setMatrixUniforms() {
gl.uniformMatrix4fv(shaderProgram.pMatrixUniform, false, pMatrix);
gl.uniformMatrix4fv(shaderProgram.mvMatrixUniform, false, mvMatrix);
}
var squareVertexPositionBuffer;
function initBuffers() {
squareVertexPositionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexPositionBuffer);
vertices = [
1.0, 1.0, 0.0,
-1.0, 1.0, 0.0,
1.0, -1.0, 0.0,
-1.0, -1.0, 0.0
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
squareVertexPositionBuffer.itemSize = 3;
squareVertexPositionBuffer.numItems = 4;
}
var z = -50.0;
var eye = vec3.create([0, 0, z]); // negation of actual eye position
var pvMatrix = mat4.create();
var pvMatrixInverse = mat4.create();
function drawScene() {
var canvas = document.getElementById("MyCanvas");
var widthamount = Math.round(canvas.width/20);
var heightamount = Math.round(canvas.height / 20);
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
mat4.perspective(90, gl.viewportWidth / gl.viewportHeight, 0.1, 3000.0, pMatrix);
mat4.identity(mvMatrix);
// calculate the view transform mvMatrix, and the projection-view matrix pvMatrix
mat4.translate(mvMatrix, eye);
mat4.multiply(pMatrix, mvMatrix, pvMatrix);
var startHeight = -((heightamount * 2.1) / 2);
var startWidth = -((widthamount * 2.1) / 2);
mat4.translate(mvMatrix, [startWidth, startHeight, 0.0]);
for (i = 0; i < heightamount; ++i) {
for (q = 0; q < widthamount; ++q) {
mat4.translate(mvMatrix, [2.1, 0.0, 0.0]);
gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexPositionBuffer);
gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, squareVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, neheTexture);
gl.uniform1i(shaderProgram.samplerUniform, 0);
setMatrixUniforms();
gl.drawArrays(gl.TRIANGLE_STRIP, 0, squareVertexPositionBuffer.numItems);
}
mat4.translate(mvMatrix, [-(widthamount*2.1), 2.1, 0.0]);
}
}
function webGLStart() {
resizeCanvas();
var canvas = document.getElementById("MyCanvas");
initGL(canvas);
initShaders();
initBuffers();
initTexture();
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.enable(gl.DEPTH_TEST);
window.addEventListener('resize', resizeCanvas, false);
tick();
}
function resizeCanvas() {
var canvas = document.getElementById("MyCanvas");
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
//drawScene();
}
function handleLoadedTexture(texture) {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.image);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.bindTexture(gl.TEXTURE_2D, null);
}
function initTexture() {
neheTexture = gl.createTexture();
neheTexture.image = new Image();
neheTexture.image.onload = function () {
handleLoadedTexture(neheTexture)
}
neheTexture.image.src = "nehe.gif";
}
</script>
The image for the texture looks like a full texture: https://github.com/gpjt/webgl-lessons/blob/master/lesson05/nehe.gif
However, the boxes end up showing as plain blue boxes (I need 10 rep to post images).
You have no texture coordinates.
You need to set up a buffer with texture coordinates
squareTextureCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, squareTextureCoordBuffer);
texcoords = [
1.0, 1.0,
0.0, 1.0,
1.0, 0.0,
0.0, 0.0,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texcoords), gl.STATIC_DRAW);
Then you're going to need to look up the location where the shader wants the texture coordinates:
textureCoordAttribute = gl.getAttribLocation(shaderProgram, "aTextureCoord");
And set up that attribute
gl.enableVertexAttribArray(textureCoordAttribute);
gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);
Also, while we're at it, there are a bunch of minor things about the code you might want to consider:
The code is turning on attributes in initShaders but if you have multiple shaders you'll need to turn on/off attributes in your draw call
The code is assigning properties to WebGL objects as in shaderProgram.pMatrixUniform = ... and squareVertexPositionBuffer.itemSize = 3; but if you ever decide to handle WebGLContextLost events that code will break because gl.createBuffer and gl.createShader will return null which means your code will be doing null.itemSize = 3 for example. It's better to make your own objects with WebGL as in var buffer = { id: gl.createBuffer(), itemSize: 3, ...}; so that if you do decide to handle WebGLContextLost events your code won't break.
The code is setting the attributes and uniforms for every face of the cube. But since they're the same for every face, it would be more efficient to set the ones that don't change just once outside the loop, as sketched at the end of this answer.
The stuff about gl.viewportWidth and gl.viewportHeight is somewhat confusing, since it makes it look like viewportWidth and viewportHeight are official parts of WebGL even though they are not. On top of that, if you have a canvas that scales or changes shape, they'll get out of sync, which is what happens in resizeCanvas. It's better to just use gl.canvas.width and gl.canvas.height directly, or better yet gl.drawingBufferWidth and gl.drawingBufferHeight.
The size the canvas is displayed at is separate from its resolution. It's more appropriate to set your aspect ratio to the size it is displayed at. In other words, mat4.perspective(90, gl.canvas.clientWidth / gl.canvas.clientHeight, 0.1, 3000.0, pMatrix); will make it render with the correct aspect ratio regardless of the resolution of the canvas.
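For the point above about setting state that doesn't change per quad only once, the draw loop could be restructured roughly like this (a sketch based on the question's drawScene; it assumes shaderProgram.samplerUniform was looked up with gl.getUniformLocation and that the texture coordinate buffer and attribute from the top of this answer exist):
// bind the geometry, texture coordinates and texture once, outside the loops
gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexPositionBuffer);
gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, squareVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, squareTextureCoordBuffer);
gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, neheTexture);
gl.uniform1i(shaderProgram.samplerUniform, 0);
for (var i = 0; i < heightamount; ++i) {
  for (var q = 0; q < widthamount; ++q) {
    mat4.translate(mvMatrix, [2.1, 0.0, 0.0]);
    setMatrixUniforms(); // only the matrices change per quad
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, squareVertexPositionBuffer.numItems);
  }
  mat4.translate(mvMatrix, [-(widthamount * 2.1), 2.1, 0.0]);
}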
