WebGL Multi-Textured Cube - javascript

I need to texture a cube with a different texture on each side, unless there is an advantage to doing it another way. This is my vertex shader:
precision mediump float;
attribute vec3 vertPosition;
attribute vec2 vertTexCoord;
attribute float aFace;
uniform mat4 mWorld;
uniform mat4 mView;
uniform mat4 mProj;
varying vec2 fragTexCoord;
varying float vFace;
void main()
{
fragTexCoord = vertTexCoord;
vFace = aFace;
gl_Position = mProj * mView * mWorld * vec4(vertPosition, 1.0);
}
Fragment shader:
precision mediump float;
uniform sampler2D front;
uniform sampler2D back;
uniform sampler2D top;
uniform sampler2D bottom;
uniform sampler2D right;
uniform sampler2D left;
varying vec2 fragTexCoord;
varying float vFace;
void main()
{
if(vFace < 0.1)
gl_FragColor = texture2D(front, fragTexCoord);
else if(vFace < 1.1)
gl_FragColor = texture2D(back, fragTexCoord);
else if(vFace < 2.1)
gl_FragColor = texture2D(top, fragTexCoord);
else if(vFace < 3.1)
gl_FragColor = texture2D(bottom, fragTexCoord);
else if(vFace < 4.1)
gl_FragColor = texture2D(right, fragTexCoord);
else
gl_FragColor = texture2D(left, fragTexCoord);
}
Then, before I start rendering, this runs (the variables are globally defined):
cubeVertexBufferObject = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexBufferObject);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVertices), gl.STATIC_DRAW);
cubeIndexBufferObject = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeIndexBufferObject);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
textureLookUpBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, textureLookUpBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(lookUpArray), gl.STATIC_DRAW);
cubeVertexTextureCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexTextureCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoords), gl.STATIC_DRAW);
positionAttributeLocation = gl.getAttribLocation(program, 'vertPosition');
gl.vertexAttribPointer(
positionAttributeLocation,
3,
gl.FLOAT,
gl.FALSE,
5 * Float32Array.BYTES_PER_ELEMENT,
0
);
texCoordAttributeLocation = gl.getAttribLocation(program, 'vertTexCoord');
gl.vertexAttribPointer(
texCoordAttributeLocation,
2,
gl.FLOAT,
gl.FALSE,
5 * Float32Array.BYTES_PER_ELEMENT,
3 * Float32Array.BYTES_PER_ELEMENT
);
textureLookUpAttribute = gl.getAttribLocation(program, "aFace");
gl.vertexAttribPointer(
textureLookUpAttribute,
1,
gl.FLOAT,
false,
0,
0
);
faces.front = gl.getUniformLocation(program,"front");
faces.back = gl.getUniformLocation(program,"back");
faces.top = gl.getUniformLocation(program,"top");
faces.bottom = gl.getUniformLocation(program,"bottom");
faces.right = gl.getUniformLocation(program,"right");
faces.left = gl.getUniformLocation(program,"left");
//
cubeMatWorldUniformLocation = gl.getUniformLocation(program, 'mWorld');
cubeMatViewUniformLocation = gl.getUniformLocation(program, 'mView');
cubeMatProjUniformLocation = gl.getUniformLocation(program, 'mProj');
worldMatrix = new Float32Array(16);
viewMatrix = new Float32Array(16);
projMatrix = new Float32Array(16);
mat4.identity(worldMatrix);
mat4.lookAt(viewMatrix, [0, 0, -8], [0, 0, 0], [0, 1, 0]);
mat4.perspective(projMatrix, glMatrix.toRadian(45), Canvas.width / Canvas.height, 0.1, 1000.0);
gl.uniformMatrix4fv(cubeMatWorldUniformLocation, gl.FALSE, worldMatrix);
gl.uniformMatrix4fv(cubeMatViewUniformLocation, gl.FALSE, viewMatrix);
gl.uniformMatrix4fv(cubeMatProjUniformLocation, gl.FALSE, projMatrix);
Finally, every time I render, this runs:
gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexTextureCoordBuffer);
gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexBufferObject);
gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, textureLookUpBuffer);
gl.vertexAttribPointer(textureLookUpAttribute, 1, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeIndexBufferObject);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(faces.front, 0);
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, grass);
gl.uniform1i(faces.back, 1);
gl.activeTexture(gl.TEXTURE2);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(faces.top, 2);
gl.activeTexture(gl.TEXTURE3);
gl.bindTexture(gl.TEXTURE_2D, grass);
gl.uniform1i(faces.bottom, 3);
gl.activeTexture(gl.TEXTURE4);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(faces.right, 4);
gl.activeTexture(gl.TEXTURE5);
gl.bindTexture(gl.TEXTURE_2D, grass);
gl.uniform1i(faces.left, 5);
gl.drawElements(gl.TRIANGLES, 36, gl.UNSIGNED_SHORT, 0);
It gives me error messages. They're different each time I change the code slightly (while trying to fix it), so that's why I don't have an exact error message. If you need the different arrays for the cube I can post them. I was following online tutorials on how to do this (and on WebGL in general), but as you can see, it didn't work.

The most common way to texture a cube with a different image on each side is to use a texture atlas: put all six faces into one texture and use texture coordinates to select the right part of the texture for each face.
See the bottom of this article.
The advantages: there's one texture, one texture unit to set up, and one uniform to set, which leaves more samplers free for other things (normal maps, etc.), and the shader is simpler and runs faster.
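As a rough illustration of the atlas idea (not taken from the asker's code), assume the six face images are packed into one texture as a 3x2 grid in the order front, back, top, bottom, right, left. Each face's texture coordinates then select its cell instead of covering 0..1, and the fragment shader shrinks to a single texture2D lookup:
// hypothetical helper: UVs for one face of a 3x2 atlas
function faceTexCoords(faceIndex) {
  var cols = 3, rows = 2;
  var u0 = (faceIndex % cols) / cols;
  var v0 = Math.floor(faceIndex / cols) / rows;
  var u1 = u0 + 1 / cols;
  var v1 = v0 + 1 / rows;
  // four corners of this face's quad (order must match that face's vertex order)
  return [u0, v0,  u1, v0,  u1, v1,  u0, v1];
}
// build texture coordinates for all six faces:
var textureCoords = [];
for (var face = 0; face < 6; ++face) {
  textureCoords = textureCoords.concat(faceTexCoords(face));
}
// the fragment shader then becomes just:
//   gl_FragColor = texture2D(atlas, fragTexCoord);
With this, the aFace attribute and the six samplers disappear entirely.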

Related

How to keep textures distorted

I am a novice in WebGL. I have written an effect that distorts a texture with the mouse.
I want to keep the distorted texture and not let it return to its original appearance.
My idea is to use an FBO or copyTexImage2D, but my implementation fails; the code or the approach may be wrong. How do I write it to make this happen? Thanks in advance!
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
vec2 zeroToOne = a_position / u_resolution;
vec2 zeroToTwo = zeroToOne * 2.0;
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
v_texCoord = a_texCoord;
}
precision mediump float;
uniform sampler2D u_image;
uniform vec2 u_mouse;
varying vec2 v_texCoord;
vec2 pinch(vec2 vector)
{
vec2 center = u_mouse + vector;
float dist = distance(v_texCoord, u_mouse);
vec2 point = u_mouse + smoothstep(0., 1., dist / 0.1) * vector;
return (v_texCoord - center) + point;
}
void main() {
vec2 vector = vec2(0.035, 0.035);
vec2 tex = pinch(vector);
gl_FragColor = texture2D(u_image, tex);
}
var image = new Image();
function main() {
image.crossOrigin = "anonymous";
image.src = "https://hips.hearstapps.com/hmg-prod/images/face-wash-gettyimages-489974588-1557345669.jpg";
image.onload = function() {
render(image);
}
}
main();
function shaderSourceById(id) {
return document.getElementById(id).textContent;
}
function shader(glContext, type, source) {
const shader = glContext.createShader(type);
glContext.shaderSource(shader, source);
glContext.compileShader(shader);
if (!glContext.getShaderParameter(shader, glContext.COMPILE_STATUS)) {
throw 'err' + glContext.getShaderInfoLog(shader);
}
return shader;
}
function installProgram(glContext, vertexSource, fragSource) {
const vertexShader = shader(glContext, glContext.VERTEX_SHADER, vertexSource);
const fragShader = shader(glContext, glContext.FRAGMENT_SHADER, fragSource);
const prog = glContext.createProgram();
glContext.attachShader(prog, vertexShader);
glContext.attachShader(prog, fragShader);
glContext.linkProgram(prog);
if (!glContext.getProgramParameter(prog, glContext.LINK_STATUS)) {
throw 'err' + glContext.getProgramInfoLog(prog);
}
glContext.useProgram(prog);
return prog;
}
function getGLContext(glCanvas) {
glCanvas.width = glCanvas.clientWidth;
glCanvas.height = glCanvas.clientHeight;
const gl = glCanvas.getContext('webgl');
if (!gl) {
throw 'Browser not supported';
}
gl.clearColor(1.0, 1.0, 1.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
return gl;
}
const gl = getGLContext(document.getElementById('glCanvas'), {preserveDrawingBuffer: true});
const prog = installProgram(gl,
shaderSourceById('vertex-shader'),
shaderSourceById('fragment-shader')
);
var mouseLocation = gl.getUniformLocation(prog, "u_mouse");
function render(image) {
var positionLocation = gl.getAttribLocation(prog, "a_position");
var resolutionLocation = gl.getUniformLocation(prog, "u_resolution");
var texCoordLocation = gl.getAttribLocation(prog, "a_texCoord");
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
setRectangle(gl, 0, 0, image.width, image.height);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
gl.uniform2f(resolutionLocation, gl.canvas.width, gl.canvas.height);
var texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0,]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
var result = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (result != gl.FRAMEBUFFER_COMPLETE) {
alert("unsupported framebuffer");
return;
}
var newTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, newTex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); // 4
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, image.width, image.height, 0);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
function setRectangle(gl, x, y, width, height) {
var x1 = x;
var x2 = x + width;
var y1 = y;
var y2 = y + height;
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
x1, y1,
x2, y1,
x1, y2,
x1, y2,
x2, y1,
x2, y2,
]), gl.STATIC_DRAW);
}
document.getElementById('glCanvas').addEventListener('mousemove', function(e) {
let x = e.x / image.width;
let y = e.y / image.height;
gl.uniform2f(mouseLocation, x, y);
gl.drawArrays(gl.TRIANGLES, 0, 6);
});
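A rough sketch of the copyTexImage2D idea mentioned above (assuming the canvas and the image are the same size, and that texture is given CLAMP_TO_EDGE wrapping with NEAREST or LINEAR filtering so copying a non-power-of-two size is legal): copy the canvas back into the source texture right after each draw, so the next draw distorts the already-distorted result.
document.getElementById('glCanvas').addEventListener('mousemove', function(e) {
  gl.uniform2f(mouseLocation, e.x / image.width, e.y / image.height);
  gl.drawArrays(gl.TRIANGLES, 0, 6);
  // copy the freshly drawn pixels back into the texture we sample from,
  // in the same event handler so the drawing buffer has not been composited yet
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, gl.canvas.width, gl.canvas.height, 0);
});
Whether this gives the exact accumulation you want depends on the canvas and texture sizes matching; otherwise, rendering into framebuffer-attached textures and ping-ponging between two of them (as in the framebuffer question below) is the more general approach.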

How to work with framebuffers in webgl?

I have been trying to understand framebuffers in WebGL/OpenGL ES.
I know that we can blend multiple textures using framebuffers.
So, to understand it, I wrote a sample that takes a 1x1 texture and tries to apply framebuffer logic on top of it.
But it didn't work.
See the snippet at the bottom: if you click on "Mix Red and blue", the image doesn't get rendered. Am I doing anything wrong?
Code :
var getWebGLContext = function(canvas) {
var webglContextParams = ['webgl', 'experimental-webgl', 'webkit-3d', 'moz-webgl'];
var webglContext = null;
for (var index = 0; index < webglContextParams.length; index++) {
try {
webglContext = canvas.getContext(webglContextParams[index]);
if(webglContext) {
//breaking as we got our context
break;
}
} catch (E) {
console.log(E);
}
}
if(webglContext === null) {
alert('WebGL is not supported on your browser.');
} else {
//WebGL is supported in your browser, lets render the texture
}
fillGLForCleanUp(webglContext);
return webglContext;
}
var createVertexShader = function (vertexShaderSource) {
console.log(vertexShaderSource);
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderSource);
gl.compileShader(vertexShader);
return vertexShader;
}
var createFragmentShader = function (fragmentShaderSource) {
console.log(fragmentShaderSource);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderSource);
gl.compileShader(fragmentShader);
return fragmentShader;
}
var createAndLinkPrograms = function (vertexShader, fragmentShader) {
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
alert('Could not initialise shaders');
}
gl.useProgram(program);
return program;
}
var createAndBindBuffer = function (verticesOrIndices, bufferType) {
var buffer = gl.createBuffer();
gl.bindBuffer(bufferType, buffer);
gl.bufferData(bufferType, verticesOrIndices, gl.STATIC_DRAW);
//clear memory
gl.bindBuffer(bufferType, null);
return buffer;
}
var allowAllImageSizes = function() {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// gl.bindTexture(gl.TEXTURE_2D, null);
}
var createAndSetupTexture = function() {
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
allowAllImageSizes();
gl.textures.push(texture);
return texture;
}
var getWebGL = function (canvas, width, height) {
if(!canvas) {
canvas = document.createElement('canvas');
canvas.id = 'canvas';
canvas.width = !width ? 512 : width;
canvas.height = !height ? 512 : height;
document.body.appendChild(canvas);
} else {
canvas.width = !width ? 512 : width;
canvas.height = !height ? 512 : height;
}
return getWebGLContext(canvas);
}
var fillGLForCleanUp = function (gl) {
gl.textures = [];
gl.framebuffers = [];
gl.array_buffer = [];
gl.element_array_buffers = [];
}
var canvas, gl, attrPosition, texture, program,
vertexBuffer, textureBuffer, vertices, texVertices,
attrPos, attrTexPos, textures = [], framebuffers = [];
canvas = document.getElementById('canvas');
gl = getWebGL(canvas);
vertices = new Float32Array([
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0,
-1.0, 1.0,
-1.0, -1.0,
]);
texVertices = new Float32Array([
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
0.0, 0.0
]);
var getProgram = function () {
var vs = createVertexShader([
'attribute vec2 attrPos;',
'attribute vec2 attrTexPos;',
'varying highp vec2 vTexCoord;',
'void main() {',
'\tgl_Position = vec4(attrPos, 0.0, 1.0);',
'}'
].join('\n'));
var fs = createFragmentShader([
'varying highp vec2 vTexCoord;',
'uniform sampler2D uImage;',
'void main() {',
'\tgl_FragColor = texture2D(uImage, vTexCoord);',
'}'
].join('\n'));
return createAndLinkPrograms(vs, fs);
};
var render = function () {
gl.clear(gl.DEPTH_BUFFER_BIT|gl.COLOR_BUFFER_BIT);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.vertexAttribPointer(attrPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, textureBuffer);
gl.vertexAttribPointer(attrTexPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);
};
if (gl) {
gl.clearColor(0.1, 0.5, 1.0, 1.0);
render();
program = getProgram();
texture = createAndSetupTexture();
vertexBuffer = createAndBindBuffer(vertices, gl.ARRAY_BUFFER);
attrPos = gl.getUniformLocation(program, 'attrPos');
gl.enableVertexAttribArray(attrPos);
textureBuffer = createAndBindBuffer(texVertices, gl.ARRAY_BUFFER);
attrTexPos = gl.getUniformLocation(program, 'attrTexPos');
gl.enableVertexAttribArray(attrTexPos);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([123, 0, 60, 255]));
render();
}
var initPingPongTextures = function(textures, framebuffers) {
for (var i = 0; i < 2; ++i) {
var tex = createAndSetupTexture(gl);
textures.push(tex);
// make the texture the same size as the image
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
// Create a framebuffer
var fbo = gl.createFramebuffer();
framebuffers.push(fbo);
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
// Attach a texture to it.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
}
}
var setFramebuffer = function(fbo, width, height) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
gl.viewport(0, 0, width, height);
};
var mixRedAndBlue = function () {
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
setFramebuffer(framebuffers[0], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([255, 0, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[0]);
setFramebuffer(framebuffers[1], 1, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 255, 0, 255]));
render();
gl.bindTexture(gl.TEXTURE_2D, textures[1]);
setFramebuffer(null, 1, 1);
render();
};
<button id="redImg" onclick="mixRedAndBlue()">Mix Red and blue</button><hr/>
<canvas id="canvas" width=512 height=512></canvas>
Edit 1:
I am trying to achieve the same thing with multiple programs and multiple fragment shaders, because having if/else statements inside a fragment shader is not recommended since it runs for every pixel.
Shaders.prototype.VS_Base = [
'attribute vec3 verticesPosition;',
'attribute vec2 texturePosition;',
'varying highp vec2 vTextureCoord;',
'void main(void) {',
'\tgl_Position = vec4(verticesPosition * vec3(1.0, -1.0, 1.0), 0.5);',
'\tvTextureCoord = texturePosition;',
'}'
].join('\n');
Shaders.prototype.FS_Base_Image_RED = [
'#ifdef GL_ES',
'precision highp float;',
'#endif',
'uniform sampler2D uImage;',
'varying highp vec2 vTextureCoord;',
'void main (void) {',
'\tgl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);//texture2D(uImage, vTextureCoord);',
'}'
].join('\n');
Shaders.prototype.FS_Base_Image_BLUE = [
'#ifdef GL_ES',
'precision highp float;',
'#endif',
'uniform sampler2D uImage;',
'varying highp vec2 vTextureCoord;',
'void main (void) {',
'\tgl_FragColor = vec4(0.0, 0.0, 1.0, 1.0);//texture2D(uImage, vTextureCoord);',
'}'
].join('\n');
Now I have two separate programs, one for each fragment shader, and I need to use framebuffers for mixing red and blue. I am not looking for mix(); the actual scenario is very complex, and that's exactly why I am using multiple programs with separate fragment shaders: to avoid conditional if/else statements.
It's not clear what you're trying to do. A framebuffer is just a list of attachments (textures and renderbuffers). You use it to render to a texture and/or renderbuffer. Then you can use the texture you just rendered to as input to some other render.
Here's an example with NO framebuffers. It blends 2 textures.
var vs = `
attribute vec4 position;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = position.xy * .5 + .5;
}
`;
var fs = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D tex1;
uniform sampler2D tex2;
void main() {
vec4 color1 = texture2D(tex1, v_texcoord);
vec4 color2 = texture2D(tex2, v_texcoord);
gl_FragColor = mix(color1, color2, 0.5);
}
`;
const gl = document.querySelector("canvas").getContext("webgl");
const program = twgl.createProgramFromSources(gl, [vs, fs]);
// make 2 textures with canvas 2d
const ctx = document.createElement("canvas").getContext("2d");
ctx.canvas.width = 64;
ctx.canvas.height = 64;
// first texture has a circle
ctx.fillStyle = "blue";
ctx.fillRect(0, 0, 64, 64);
ctx.strokeStyle = "yellow";
ctx.beginPath();
ctx.arc(32, 32, 20, 0, Math.PI * 2, false);
ctx.lineWidth = 12;
ctx.stroke();
const tex1 = createTextureFromCanvas(gl, ctx.canvas);
// second texture has a diamond (diagonal square)
ctx.fillStyle = "red";
ctx.fillRect(0, 0, 64, 64);
ctx.fillStyle = "cyan";
ctx.beginPath();
ctx.moveTo(32, 6);
ctx.lineTo(58, 32);
ctx.lineTo(32, 58);
ctx.lineTo(6, 32);
ctx.lineTo(32, 6);
ctx.fill();
const tex2 = createTextureFromCanvas(gl, ctx.canvas);
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
]), gl.STATIC_DRAW);
const positionLoc = gl.getAttribLocation(program, "position");
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
const tex1Loc = gl.getUniformLocation(program, "tex1");
const tex2Loc = gl.getUniformLocation(program, "tex2");
gl.useProgram(program);
gl.uniform1i(tex1Loc, 0);
gl.uniform1i(tex2Loc, 1);
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, tex1);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, tex2);
gl.drawArrays(gl.TRIANGLES, 0, 6);
function createTextureFromCanvas(gl, canvas) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, ctx.canvas);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
return tex;
}
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/2.x/twgl.min.js"></script>
<canvas></canvas>
For your purpose there is no difference in the blending part; the only difference is where the textures come from. Above, the textures were created using a 2D canvas. Instead, you can use a framebuffer to render to a texture. AFTER you've rendered to a texture you can then use that texture in some other render, just like above.
To render to a texture first you create a framebuffer
var fb = gl.createFramebuffer();
Then you attach a texture to it
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0, // attach texture as COLOR_ATTACHMENT0
gl.TEXTURE_2D, // attach a 2D texture
someTexture, // the texture to attach
0); // the mip level to render to (must be 0 in WebGL1)
Depending on your attachments you should check if they work.
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
// these attachments don't work
}
The WebGL spec lists 3 combinations of attachments that are guaranteed to work. The example below uses one of those 3, so there's no need to check.
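For reference, the three guaranteed combinations are roughly: a COLOR_ATTACHMENT0 RGBA/UNSIGNED_BYTE texture alone, the same plus a DEPTH_COMPONENT16 renderbuffer on DEPTH_ATTACHMENT, or the same plus a DEPTH_STENCIL renderbuffer on DEPTH_STENCIL_ATTACHMENT. A minimal sketch of the second combination (assuming the framebuffer above is still bound and its color texture is 64x64):
var depthRb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthRb);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, 64, 64);  // match the texture size
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, depthRb);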
Now if you bind the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
Then, when you call any gl.drawXXX function or gl.clear, it will draw to someTexture instead of the canvas. To start drawing to the canvas again, bind null:
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
Remember that if the canvas and the texture are different sizes, you'll need to call gl.viewport to render correctly.
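A minimal sketch of that, assuming a 64x64 texture attached to fb (the full example below does the same thing):
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, 64, 64);                             // match the texture size
// ... draw to the texture ...
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);  // match the canvas again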
var vs = `
attribute vec4 position;
uniform mat4 matrix;
varying vec2 v_texcoord;
void main() {
gl_Position = matrix * position;
v_texcoord = position.xy * .5 + .5;
}
`;
var colorFS = `
precision mediump float;
uniform vec4 color;
void main() {
gl_FragColor = color;
}
`;
var mixFS = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D tex1;
uniform sampler2D tex2;
void main() {
// probably should use different texture coords for each
// texture for more flexibility but I'm lazy
vec4 color1 = texture2D(tex1, v_texcoord);
vec4 color2 = texture2D(tex2, v_texcoord);
gl_FragColor = mix(color1, color2, 0.5);
}
`;
const gl = document.querySelector("canvas").getContext("webgl");
const colorProgram = twgl.createProgramFromSources(gl, [vs, colorFS]);
const mixProgram = twgl.createProgramFromSources(gl, [vs, mixFS]);
// make 2 textures by attaching them to framebuffers and rendering to them
const texFbPair1 = createTextureAndFramebuffer(gl, 64, 64);
const texFbPair2 = createTextureAndFramebuffer(gl, 64, 64);
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
]), gl.STATIC_DRAW);
function setAttributes(buf, positionLoc) {
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
}
const colorPrgPositionLoc = gl.getAttribLocation(colorProgram, "position");
setAttributes(buf, colorPrgPositionLoc);
const colorLoc = gl.getUniformLocation(colorProgram, "color");
const colorProgMatrixLoc = gl.getUniformLocation(colorProgram, "matrix");
// draw red rect to first texture through the framebuffer it's attached to
gl.useProgram(colorProgram);
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair1.fb);
gl.viewport(0, 0, 64, 64);
gl.uniform4fv(colorLoc, [1, 0, 0, 1]);
gl.uniformMatrix4fv(colorProgMatrixLoc, false, [
0.5, 0, 0, 0,
0,.25, 0, 0,
0, 0, 1, 0,
.2,.3, 0, 1,
]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Draw a blue rect to the second texture through the framebuffer it's attached to
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair2.fb);
gl.viewport(0, 0, 64, 64);
gl.uniform4fv(colorLoc, [0, 0, 1, 1]);
gl.uniformMatrix4fv(colorProgMatrixLoc, false, [
0.25, 0, 0, 0,
0,.5, 0, 0,
0, 0, 1, 0,
.2,.3, 0, 1,
]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Draw both textures to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
const mixPrgPositionLoc = gl.getAttribLocation(mixProgram, "position");
setAttributes(buf, mixPrgPositionLoc);
const mixProgMatrixLoc = gl.getUniformLocation(mixProgram, "matrix");
const tex1Loc = gl.getUniformLocation(mixProgram, "tex1");
const tex2Loc = gl.getUniformLocation(mixProgram, "tex2");
gl.useProgram(mixProgram);
gl.uniform1i(tex1Loc, 0);
gl.uniform1i(tex2Loc, 1);
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, texFbPair1.tex);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, texFbPair2.tex);
gl.uniformMatrix4fv(mixProgMatrixLoc, false, [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
function createTextureAndFramebuffer(gl, width, height) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
return {tex: tex, fb: fb};
}
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/2.x/twgl.min.js"></script>
<canvas></canvas>
The only functional difference between the first program and the second is how the textures got their data. In the first example the textures got their data from a 2D canvas. In the second example the textures got their data by rendering to them with WebGL.
As for why your example doesn't blend textures, in order to blend 2 textures you need a shader that uses two textures.

WebGL example won't work with supplied uniforms matrices

I'm finding a few difficulties as I'm moving from OpenGL to WebGL.
The triangle example worked fine until I added uniform matrices, which is why I'll just include the relevant code.
Hopefully you can spot the error(s).
var gl;
function webGLStart()
{
var canvas = document.getElementById("glcanvas");
gl = canvas.getContext("experimental-webgl");
gl.viewport(0, 0, 500, 500);
if(!gl)
{ alert("Could Not Initialize WebGL"); }
//initialize shaders
var Program = getShader("triangle_vert", "triangle_frag", false);
//initialize buffers
var triangleVBO = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, triangleVBO);
var vertices = [ //position //color
-1.0, -1.0, -5.0, 1.0, 0.0, 0.0,
+0.0, +1.0, -5.0, 0.0, 1.0, 0.0,
+1.0, -1.0, -5.0, 0.0, 0.0, 1.0
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
var step = Float32Array.BYTES_PER_ELEMENT;
gl.vertexAttribPointer(0, 3, gl.FLOAT, false, step * 6, 0);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(1, 3, gl.FLOAT, false, step * 6, step * 3);
gl.enableVertexAttribArray(1);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
//setting matrices and uniforms
var Model = mat4.create();
var View = mat4.create();
var Projection = mat4.create();
mat4.identity(Model);
mat4.identity(View);
mat4.perspective(45, 500.0/500.0, 0.1, 100.0, Projection);
gl.useProgram(Program);
gl.uniformMatrix4fv(gl.getUniformLocation(Program, "Model"), false, Model);
gl.uniformMatrix4fv(gl.getUniformLocation(Program, "View"), false, View);
gl.uniformMatrix4fv(gl.getUniformLocation(Program, "Projection"), false, Projection);
gl.drawArrays(gl.TRIANGLES, 0, 3);
}
Vertex / fragment shaders:
<head>
<script id="triangle_frag" type="frag">
precision mediump float;
varying vec3 Col;
void main(void) {
gl_FragColor = vec4(Col, 1.0);
}
</script>
<script id="triangle_vert" type="vert">
attribute vec3 aVertexPosition;
attribute vec3 color;
varying vec3 Col;
uniform mat4 Model;
uniform mat4 View;
uniform mat4 Projection;
void main(void) {
gl_Position = Projection * View * Model * vec4(aVertexPosition, 1.0);
Col = color;
}
</script>
//...
I'm using gl-matrix-min.js and trying to run the example with Firefox on Windows 7.
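One thing worth checking, assuming the bundled gl-matrix-min.js is a 2.x build: in that API, mat4.perspective takes the output matrix first and the field of view in radians, e.g.
mat4.perspective(Projection, glMatrix.toRadian(45), 500.0 / 500.0, 0.1, 100.0);
Older gl-matrix releases used the (fovy, aspect, near, far, dest) order shown above, so the call has to match whichever version is actually loaded.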

Webgl apply 1D texture on a line

I have a WebGL application with a lot of segments (gl.LINES). Each segment needs a specific 1D texture applied to it, or if you prefer, each pixel of a segment needs a specific color. This will be used as an alternative to a histogram on the segment.
I made a test texture of 4 pixels going from red to black. I would have liked to see a gradient from red to black, but instead the output is a red-only line.
Here is my getTextureFromPixelArray method:
Scene.getTextureFromPixelArray = function(data, width, height) {
if (_.isArray(data) && data.length) {
var gl = this._context;
data = new Uint8Array(data);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
return texture;
} else {
this.log("Unable to create texture");
}
return null;
};
Here is my code for generating a 1D test texture:
var verticles = [];
verticles.push(this.vertex1.position[0]);
verticles.push(this.vertex1.position[1]);
verticles.push(this.vertex2.position[0]);
verticles.push(this.vertex2.position[1]);
var color = [];
var textureWidth = 4;
var textureHeight = 1;
var textureCoords = [];
for (var i=0;i<textureHeight;i++) {
for (var j=0;j<textureWidth;j++) {
color.push(Math.round(255 - j*255/(textureWidth-1))); // Red
color.push(0); // Green
color.push(0); // Blue
color.push(255); // Alpha
}
}
textureCoords.push(0);
textureCoords.push(0.5);
textureCoords.push(1);
textureCoords.push(0.5);
var vertexPositionAttributeLocation = gl.getAttribLocation(shaderProgram, "aVertexPosition");
var textureCoordAttributeLocation = gl.getAttribLocation(shaderProgram, "aTextureCoord");
// Set vertex buffers
gl.enableVertexAttribArray(vertexPositionAttributeLocation);
var verticlesBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, verticlesBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verticles), gl.STATIC_DRAW);
gl.vertexAttribPointer(vertexPositionAttributeLocation, 2, gl.FLOAT, false, 0, 0);
var texture = this.getTextureFromPixelArray(textureData, textureWidth, textureHeight||1);
var vertexTextureCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexTextureCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoords), gl.STATIC_DRAW);
gl.vertexAttribPointer(textureCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(gl.getUniformLocation(shaderProgram, "uSampler"), 0);
// Draw the segment
gl.drawArrays(gl.LINES, 0, verticles.length/2);
Here are my shaders:
Scene.fragmentShaderSrc = "\
precision mediump float;\
varying highp vec2 vTextureCoord;\
uniform sampler2D uSampler;\
void main(void) {gl_FragColor = texture2D(uSampler, vTextureCoord);}\
";
Scene.vertexShaderSrc = "\
attribute vec2 aVertexPosition;\
attribute vec2 aTextureCoord;\
uniform mat4 uPMatrix;\
uniform mat4 uModelView;\
varying highp vec2 vTextureCoord;\
void main(void) {\
gl_Position = uPMatrix * uModelView * vec4(aVertexPosition,0,1);\
vTextureCoord = aTextureCoord;\
}\
";
Can someone see where I made an error?
I needed to enable the texture coordinate attribute with:
gl.enableVertexAttribArray(textureCoordAttributeLocation);
before
var vertexTextureCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexTextureCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoords), gl.STATIC_DRAW);
gl.vertexAttribPointer(textureCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0);

Blend two canvases onto one with WebGL

What I'm trying to do is blend two canvases onto a single canvas for a drawing app I am creating. I know JavaScript very well, but I really don't have any clue where to start with WebGL, and since it isn't a very hard task, I'm assuming it would yield quicker processing if I don't use another library like Three.js or others of that sort.
What I already have are the canvases that the user will be drawing on (let's call them canvas A and B), which are both hidden, and canvas C, which is shown.
<canvas id='C' width=800 height=600></canvas>
<canvas id='A' width=800 height=600 style='display:none'></canvas>
<canvas id='B' width=800 height=600 style='display:none'></canvas>
I already have the main drawing app done, where the user picks a layer and draws on it, but how would I use WebGL to blend the two layers together with some blend mode (e.g. multiply) as the user continues to edit the canvases?
At first I tried following the other post here: https://stackoverflow.com/a/11596922/1572938 but I got confused.
If someone wants to fill in the gaps on my jsfiddle for the other post that would work very well! http://jsfiddle.net/W3fVV/1/
There's an example of drawing with images here:
https://webglfundamentals.org/webgl/lessons/webgl-image-processing.html
WebGL doesn't care if the sources are images, canvases or video. So change the samples from
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, someImage);
to
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, someCanvas);
Then write a fragment shader to blend the 2 textures as in
precision mediump float;
// our 2 canvases
uniform sampler2D u_canvas1;
uniform sampler2D u_canvas2;
// the texCoords passed in from the vertex shader.
// note: we're only using 1 set of texCoords which means
// we're assuming the canvases are the same size.
varying vec2 v_texCoord;
void main() {
// Look up a pixel from first canvas
vec4 color1 = texture2D(u_canvas1, v_texCoord);
// Look up a pixel from second canvas
vec4 color2 = texture2D(u_canvas2, v_texCoord);
// return the 2 colors multiplied
gl_FragColor = color1 * color2;
}
You'll need to setup the 2 textures and tell your GLSL program which texture units you put them on.
function setupTexture(canvas, textureUnit, program, uniformName) {
var tex = gl.createTexture();
updateTextureFromCanvas(tex, canvas, textureUnit);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
var location = gl.getUniformLocation(program, uniformName);
gl.uniform1i(location, textureUnit);
}
function updateTextureFromCanvas(tex, canvas, textureUnit) {
gl.activeTexture(gl.TEXTURE0 + textureUnit);
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
}
var tex1 = setupTexture(canvas1, 0, program, "u_canvas1");
var tex2 = setupTexture(canvas2, 1, program, "u_canvas2");
Sample here:
function main() {
var canvas1 = document.getElementById("canvas1");
var canvas2 = document.getElementById("canvas2");
var ctx1 = canvas1.getContext("2d");
var ctx2 = canvas2.getContext("2d");
ctx1.fillStyle = "purple";
ctx1.arc(64, 64, 30, 0, Math.PI * 2, false);
ctx1.fill();
ctx2.fillStyle = "cyan";
ctx2.fillRect(50, 10, 28, 108);
// Get A WebGL context
var canvas = document.getElementById("webgl");
var gl = canvas.getContext("webgl");
if (!gl) {
return;
}
// setup GLSL program
var program = twgl.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var texCoordLocation = gl.getAttribLocation(program, "a_texCoord");
// provide texture coordinates for the rectangle.
var texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
// lookup uniforms
var resolutionLocation = gl.getUniformLocation(program, "u_resolution");
// set the resolution
gl.uniform2f(resolutionLocation, canvas1.width, canvas1.height);
// Create a buffer for the position of the rectangle corners.
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
// Set a rectangle the same size as the image.
setRectangle(gl, 0, 0, canvas.width, canvas.height);
function setupTexture(canvas, textureUnit, program, uniformName) {
var tex = gl.createTexture();
updateTextureFromCanvas(tex, canvas, textureUnit);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
var location = gl.getUniformLocation(program, uniformName);
gl.uniform1i(location, textureUnit);
}
function updateTextureFromCanvas(tex, canvas, textureUnit) {
gl.activeTexture(gl.TEXTURE0 + textureUnit);
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
}
var tex1 = setupTexture(canvas1, 0, program, "u_canvas1");
var tex2 = setupTexture(canvas2, 1, program, "u_canvas2");
// Draw the rectangle.
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
function setRectangle(gl, x, y, width, height) {
var x1 = x;
var x2 = x + width;
var y1 = y;
var y2 = y + height;
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
x1, y1,
x2, y1,
x1, y2,
x1, y2,
x2, y1,
x2, y2]), gl.STATIC_DRAW);
}
main();
canvas {
border: 2px solid black;
width: 128px;
height: 128px;
}
<script src="https://twgljs.org/dist/3.x/twgl.min.js"></script>
<!-- vertex shader -->
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
// convert the rectangle from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
// pass the texCoord to the fragment shader
// The GPU will interpolate this value between points.
v_texCoord = a_texCoord;
}
</script>
<!-- fragment shader -->
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
// our 2 canvases
uniform sampler2D u_canvas1;
uniform sampler2D u_canvas2;
// the texCoords passed in from the vertex shader.
// note: we're only using 1 set of texCoords which means
// we're assuming the canvases are the same size.
varying vec2 v_texCoord;
void main() {
// Look up a pixel from first canvas
vec4 color1 = texture2D(u_canvas1, v_texCoord);
// Look up a pixel from second canvas
vec4 color2 = texture2D(u_canvas2, v_texCoord);
// return the 2 colors multiplied
gl_FragColor = color1 * color2;
}
</script>
<!-- fragment shader -->
<script id="aa2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
// our texture
uniform sampler2D u_canvas1;
uniform sampler2D u_canvas2;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_canvas1, v_texCoord);
}
</script>
<canvas id="canvas1" width="128" height="128"></canvas>
<canvas id="canvas2" width="128" height="128"></canvas>
<canvas id="webgl" width="128" height="128"></canvas>
I think it will be simpler for you to just use the canvas 2D API.
You can first draw canvas A into canvas C, then change canvas C's global alpha before drawing canvas B into canvas C.
You can also change the way the canvases are blended using globalCompositeOperation (a sketch follows the snippet below).
var canvasA = document.getElementById('A');
var canvasB = document.getElementById('B');
// The target canvas
var canvasC = document.getElementById('C');
var ctx = canvasC.getContext('2d');
ctx.drawImage(canvasA, 0,0);
ctx.globalAlpha = 0.5;
ctx.drawImage(canvasB,0, 0);
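For the multiply blend mentioned in the question, a minimal sketch using the composite mode instead of global alpha (assuming the browser supports the 'multiply' mode on 2D canvas):
ctx.drawImage(canvasA, 0, 0);
ctx.globalCompositeOperation = 'multiply';
ctx.drawImage(canvasB, 0, 0);
ctx.globalCompositeOperation = 'source-over';  // restore the default for later drawing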
