Need help rendering positions to a texture using framebuffers in vanilla WebGL - JavaScript

I'm trying to do position updates for particles being rendered as points using WebGL. I've asked some questions before about the same project I'm playing around with, here and here, which led me a fair bit of the way. Unfortunately, most of the answers use twgl, which, to me, takes a lot of shortcuts that I have a hard time understanding (so I didn't want to just copy it either, but rather start from the basics).
Basically, I'm trying to render to a texture with one framebuffer + program and then use this texture in another program.
I don't know if I'm failing to render to the posTexture, or if the posTexture gets rendered successfully and then fails to load into the renderProgram afterwards (since both steps happen in the 'black box' that is the GPU).
I made a snippet here without the renderFramebuffer (it simply renders directly to the canvas instead) to show the problem. The core of the problem is at the end of the JavaScript; the rest is setup (which may be related):
function initShaderProgram(gl, vShader, fShader) {
const shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vShader);
gl.attachShader(shaderProgram, fShader);
gl.linkProgram(shaderProgram);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
throw new Error('Unable to initiate webgl shaders. Breaking.');
}
return shaderProgram;
}
function loadShader(gl, type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
let err = gl.getShaderInfoLog(shader);
gl.deleteShader(shader);
throw new Error(`Unable to compile shaders. ${err}`);
}
return shader;
}
const c = document.getElementById("c");
const gl = c.getContext('webgl2');
const amParticles = 1;
if (gl === null || gl === undefined) {
throw new Error('Unable to initiate webgl context. Breaking.');
}
// Extensions used for anti aliasing in rendering dots
let ext = gl.getExtension('EXT_color_buffer_float');
if (!ext) {
throw new Error("need EXT_color_buffer_float");
}
ext = gl.getExtension('EXT_float_blend');
if (!ext) {
throw new Error("need EXT_float_blend");
}
// Setup programs
const VsPos = document.getElementById("posVs").textContent;
const FsPos = document.getElementById("posFs").textContent;
const VsRender = document.getElementById("renderVs").textContent;
const FsRender = document.getElementById("renderFs").textContent;
const vShaderRender = loadShader(gl,
gl.VERTEX_SHADER, VsRender);
const vShaderPosUpd = loadShader(gl,
gl.VERTEX_SHADER, VsPos);
const fShaderRender = loadShader(gl,
gl.FRAGMENT_SHADER, FsRender);
const fShaderPosUpd = loadShader(gl,
gl.FRAGMENT_SHADER, FsPos);
// Setup shader
const renderProgram = initShaderProgram(gl,
vShaderRender, fShaderRender);
const posProgram = initShaderProgram(gl,
vShaderPosUpd, fShaderPosUpd);
// Setup global GL settings
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
// Blending to allow opacity (probably unrelated)
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
// Setup posTexture to render new positions to
let posTexture, posFrameBuffer; {
posTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, posTexture);
// Make texture non-mips
gl.texParameteri(gl.TEXTURE_2D,
gl.TEXTURE_WRAP_S,
gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D,
gl.TEXTURE_WRAP_T,
gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D,
gl.TEXTURE_MIN_FILTER,
gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D,
gl.TEXTURE_MAG_FILTER,
gl.NEAREST);
const level = 0;
const internalFormat = gl.RGBA32F;
const border = 0;
const format = gl.RGBA;
const type = gl.FLOAT;
// Example position pre-render
const data = new Float32Array([.5, .5, 0, 0]);
// height = 1, amount pixels = width, rgba = position
gl.texImage2D(gl.TEXTURE_2D,
level,
internalFormat,
amParticles,
1,
border,
format,
type,
data);
// Pos framebuffer
posFrameBuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER,
posFrameBuffer);
// Bind it to posTexture
gl.framebufferTexture2D(gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
posTexture,
level);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !==
gl.FRAMEBUFFER_COMPLETE) {
console.error(`Something went wrong with setting up the posFrameBuffer. Status: ${
gl.checkFramebufferStatus(gl.FRAMEBUFFER)}`);
}
}
gl.useProgram(posProgram);
gl.bindFramebuffer(gl.FRAMEBUFFER, posFrameBuffer);
gl.viewport(0, 0, amParticles, 1);
// Now (it should be?) drawing new positions to
// texture posTexture
gl.drawArrays(gl.POINTS, 0, amParticles);
// Set new posTexture to texture unit 1
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, posTexture);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.useProgram(renderProgram);
// Set uniform location to texture unit 1
const loc = gl.getUniformLocation(renderProgram, "t0_pos_tex");
gl.uniform1i(loc, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Should draw with new position
gl.drawArrays(gl.POINTS, 0, amParticles);
#c {
width: 400px;
height: 200px;
}
.hide {
display: none;
}
<canvas id="c"></canvas>
<p>
If the circle is at the <b>left</b> side of the canvas, rendered by the posProgram, the experiment is successful.
</p>
<div id="posFs" class="hide"># version 300 es
#define M_PI 3.1415927
precision highp float;
out vec4 outColor;
// Only renders one particle for the sake
// of the example to a predetermined position
void main() {
// New position to render to (should appear
// top-left ish)
float new_x = -.5;
float new_y = .5;
outColor = vec4(new_x, new_y, 0., 1.);
}
</div>
<div id="posVs" class="hide">#version 300 es
// Does nothing since the fragment shader sets
// the new position depending on the pixel
// which indicates which index of the texture
// = index of the new positions to update
void main() {}
</div>
<div id="renderVs" class="hide"># version 300 es
#define M_PI 3.1415927
uniform sampler2D t0_pos_tex;
out vec4 color;
void main() {
vec4 t0_pos = texelFetch(t0_pos_tex, ivec2(gl_VertexID, 0), 0);
gl_Position = vec4(t0_pos.x, t0_pos.y, 0., 1.);
color = vec4(1., 1., 1., 1.);
gl_PointSize = 50.0;
}
</div>
<div id="renderFs" class="hide"># version 300 es
precision highp float;
in vec4 color;
out vec4 outColor;
// Turns point into a circle and adds
// antialiasing to make it smoothly round
void main() {
float r = 0.0, delta = 0.0, alpha = 1.0;
vec2 cxy = 2.0 * gl_PointCoord - 1.0;
r = dot(cxy, cxy);
delta = fwidth(r);
alpha = 1.0 - smoothstep(1.0 - delta, 1.0 + delta, r);
outColor = color * alpha;
}
</div>

Your posVs vertex shader does nothing, so nothing will be rendered by the fragment shader. In order to render something, the vertex shader must either generate a point (by setting gl_Position and gl_PointSize), a line (by being called twice and setting gl_Position to a different value each time), or a triangle (by being called 3 times and setting gl_Position to a different value each time). So that's the first issue.
I changed it to this:
void main() {
// draw a single pixel
gl_PointSize = 1.0;
// in the center of the viewport
gl_Position = vec4(0, 0, 0, 1);
}
function initShaderProgram(gl, vShader, fShader) {
const shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vShader);
gl.attachShader(shaderProgram, fShader);
gl.linkProgram(shaderProgram);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
throw new Error('Unable to initiate webgl shaders. Breaking.');
}
return shaderProgram;
}
function loadShader(gl, type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
let err = gl.getShaderInfoLog(shader);
gl.deleteShader(shader);
throw new Error(`Unable to compile shaders. ${err}`);
}
return shader;
}
const c = document.getElementById("c");
const gl = c.getContext('webgl2');
const amParticles = 1;
if (gl === null || gl === undefined) {
throw new Error('Unable to initiate webgl context. Breaking.');
}
// Extensions used for anti aliasing in rendering dots
let ext = gl.getExtension('EXT_color_buffer_float');
if (!ext) {
throw new Error("need EXT_color_buffer_float");
}
ext = gl.getExtension('EXT_float_blend');
if (!ext) {
throw new Error("need EXT_float_blend");
}
// Setup programs
const VsPos = document.getElementById("posVs").textContent;
const FsPos = document.getElementById("posFs").textContent;
const VsRender = document.getElementById("renderVs").textContent;
const FsRender = document.getElementById("renderFs").textContent;
const vShaderRender = loadShader(gl,
gl.VERTEX_SHADER, VsRender);
const vShaderPosUpd = loadShader(gl,
gl.VERTEX_SHADER, VsPos);
const fShaderRender = loadShader(gl,
gl.FRAGMENT_SHADER, FsRender);
const fShaderPosUpd = loadShader(gl,
gl.FRAGMENT_SHADER, FsPos);
// Setup shader
const renderProgram = initShaderProgram(gl,
vShaderRender, fShaderRender);
const posProgram = initShaderProgram(gl,
vShaderPosUpd, fShaderPosUpd);
// Setup global GL settings
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
// Blending to allow opacity (probably unrelated)
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
// Setup posTexture to render new positions to
let posTexture, posFrameBuffer; {
posTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, posTexture);
// Make texture non-mips
gl.texParameteri(gl.TEXTURE_2D,
gl.TEXTURE_WRAP_S,
gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D,
gl.TEXTURE_WRAP_T,
gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D,
gl.TEXTURE_MIN_FILTER,
gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D,
gl.TEXTURE_MAG_FILTER,
gl.NEAREST);
const level = 0;
const internalFormat = gl.RGBA32F;
const border = 0;
const format = gl.RGBA;
const type = gl.FLOAT;
// Example position pre-render
const data = new Float32Array([.5, .5, 0, 0]);
// height = 1, amount pixels = width, rgba = position
gl.texImage2D(gl.TEXTURE_2D,
level,
internalFormat,
amParticles,
1,
border,
format,
type,
data);
// Pos framebuffer
posFrameBuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER,
posFrameBuffer);
// Bind it to posTexture
gl.framebufferTexture2D(gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
posTexture,
level);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !==
gl.FRAMEBUFFER_COMPLETE) {
console.error(`Something went wrong with setting up the posFrameBuffer. Status: ${
gl.checkFramebufferStatus(gl.FRAMEBUFFER)}`);
}
}
gl.useProgram(posProgram);
gl.bindFramebuffer(gl.FRAMEBUFFER, posFrameBuffer);
gl.viewport(0, 0, amParticles, 1);
// Now (it should be?) drawing new positions to
// texture posTexture
gl.drawArrays(gl.POINTS, 0, amParticles);
// Set new posTexture to texture unit 1
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, posTexture);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.useProgram(renderProgram);
// Set uniform location to texture unit 1
const loc = gl.getUniformLocation(renderProgram, "t0_pos_tex");
gl.uniform1i(loc, 1);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Should draw with new position
gl.drawArrays(gl.POINTS, 0, amParticles);
#c {
width: 400px;
height: 200px;
}
.hide {
display: none;
}
<canvas id="c"></canvas>
<p>
If the circle is at the <b>left</b> side of the canvas, rendered by the posProgram, the experiment is successful.
</p>
<div id="posFs" class="hide"># version 300 es
#define M_PI 3.1415927
precision highp float;
out vec4 outColor;
// Only renders one particle for the sake
// of the example to a predetermined position
void main() {
// New position to render to (should appear
// top-left ish)
float new_x = -.5;
float new_y = .5;
outColor = vec4(new_x, new_y, 0., 1.);
}
</div>
<div id="posVs" class="hide">#version 300 es
void main() {
// draw a single pixel
gl_PointSize = 1.0;
// in the center of the viewport
gl_Position = vec4(0, 0, 0, 1);
}
</div>
<div id="renderVs" class="hide"># version 300 es
#define M_PI 3.1415927
uniform sampler2D t0_pos_tex;
out vec4 color;
void main() {
vec4 t0_pos = texelFetch(t0_pos_tex, ivec2(gl_VertexID, 0), 0);
gl_Position = vec4(t0_pos.x, t0_pos.y, 0., 1.);
color = vec4(1., 1., 1., 1.);
gl_PointSize = 50.0;
}
</div>
<div id="renderFs" class="hide"># version 300 es
precision highp float;
in vec4 color;
out vec4 outColor;
// Turns point into a circle and adds
// antialiasing to make it smoothly round
void main() {
float r = 0.0, delta = 0.0, alpha = 1.0;
vec2 cxy = 2.0 * gl_PointCoord - 1.0;
r = dot(cxy, cxy);
delta = fwidth(r);
alpha = 1.0 - smoothstep(1.0 - delta, 1.0 + delta, r);
outColor = color * alpha;
}
</div>
But I suggest you spend a couple of minutes trying to understand the linked example. Yes, it uses TWGL, because an explanation of how to do particles doesn't also want to be a tutorial on the entirety of WebGL. It should be pretty obvious what twgl.createTexture does just by looking at the inputs. Similarly, twgl.createFramebufferInfo and twgl.createBufferInfoFromArrays, if not obvious, are probably just a few seconds away from understanding. twgl.setBuffersAndAttributes and twgl.setUniforms do exactly what they say. If you've done either of those things manually in WebGL it should be pretty clear what it means to "set buffers and attributes" and to "set uniforms". All that's left is twgl.drawBufferInfo.
In any case, it's going to be slower computing the new particle positions using gl.POINTS, one point per particle, rather than drawing a quad with N pixels, one pixel per particle. You might use gl.POINTS for drawing the particles, but not for updating the positions.
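For illustration, here's a minimal sketch (not code from the linked example; the update rule and uniform name are placeholders) of what a quad-based update pass can look like: a full-screen triangle makes the fragment shader run once per texel of the position texture, and each fragment writes the new position for its particle. You would normally ping-pong between two position textures, since a texture can't be read and written in the same draw.
// Vertex shader: cover the whole position texture with one big triangle
// generated from gl_VertexID, so no vertex buffers are needed.
const updateVs = `#version 300 es
void main() {
  vec2 p = vec2((gl_VertexID << 1) & 2, gl_VertexID & 2);
  gl_Position = vec4(p * 2.0 - 1.0, 0.0, 1.0);
}`;
// Fragment shader: one invocation per texel = one per particle.
const updateFs = `#version 300 es
precision highp float;
precision highp sampler2D;
uniform sampler2D t0_pos_tex;               // previous positions
out vec4 newPos;
void main() {
  ivec2 texel = ivec2(gl_FragCoord.xy);     // which particle this fragment is
  vec4 pos = texelFetch(t0_pos_tex, texel, 0);
  newPos = pos + vec4(0.01, 0.0, 0.0, 0.0); // placeholder update rule
}`;
// With the destination framebuffer bound and the viewport set to the
// position texture's size (amParticles x 1), one draw call updates
// every particle:
// gl.drawArrays(gl.TRIANGLES, 0, 3);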
One other note: just as you print the shader info log when compilation fails, you probably want to print the program info log when linking fails. There are plenty of errors that only happen during linking.
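As a minimal sketch of that (a small adjustment to your own initShaderProgram, with the link log included):
function initShaderProgram(gl, vShader, fShader) {
  const shaderProgram = gl.createProgram();
  gl.attachShader(shaderProgram, vShader);
  gl.attachShader(shaderProgram, fShader);
  gl.linkProgram(shaderProgram);
  if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
    // Grab the link log before deleting the program so link-only
    // errors (e.g. varying mismatches) aren't silent
    const err = gl.getProgramInfoLog(shaderProgram);
    gl.deleteProgram(shaderProgram);
    throw new Error(`Unable to link WebGL program. ${err}`);
  }
  return shaderProgram;
}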

Related

WebGL Triangle Rotation Problems: Triangle changes size and gets skewed

Dear Friend: I am new at WebGL. I have managed to draw and rotate a triangle, but there are two problems:
1. The triangle changes size and shape while drawing.
2. I don't know how to rotate the triangle around one of its corners.
Following is the code. I have written the code in a linear fashion with a utility class (GlUtil) that wraps up the boring tasks.
I am using a function called perc2glCoord that allows me to enter percentages and converts them into GL coordinates.
import GlUtil from "./glUtil/glUtil.js";
import perc2glCoord from "./functions/perc2glCoord.js";
const gl = GlUtil.getGl("bilza");
console.log(gl);
const vertices = [
perc2glCoord (50) ,perc2glCoord (50), 1,0,0,
perc2glCoord (50) ,perc2glCoord (75), 0,1,0,
perc2glCoord (75) ,perc2glCoord (50), 0,0,1,
// perc2glCoord (25) ,perc2glCoord (50), 1,0,0,
// perc2glCoord (75) ,perc2glCoord (50), 0,1,0,
// perc2glCoord (50) ,perc2glCoord (75), 0,0,1,
];
const vertexShaderSrc =
`
attribute highp vec2 a_pos;
attribute highp vec3 a_clr;
uniform float translateX;
uniform float translateY;
uniform float angle;
varying highp vec3 vColor;
void main(void) {
gl_Position = vec4(
translateX + (a_pos.x * cos(angle) - a_pos.y * sin(angle)),
translateY + (a_pos.x * sin(angle) + a_pos.y * cos(angle)),
1.0,
1.0 );
vColor = a_clr;
}
`;
const fragShaderSrc =
`
varying highp vec3 vColor;
void main(void) {
gl_FragColor = vec4 (vColor , 1.0);
}
`;
const vertexShader = GlUtil.createShader(gl,vertexShaderSrc,gl.VERTEX_SHADER);
const fragmentShader = GlUtil.createShader(gl,fragShaderSrc,gl.FRAGMENT_SHADER);
const programe = GlUtil.getProgram(gl,vertexShader,fragmentShader);
const VOB = GlUtil.getBuffer(gl);
GlUtil.bindBuffer(gl,VOB,vertices);
GlUtil.linkNuseProgram(gl,programe);
let angleValue = 0;
function draw(){
GlUtil.setAttribute(gl,"a_pos",programe, 2 ,4*5,0);
GlUtil.setAttribute(gl,"a_clr",programe, 3 , 4*5,2 * 4);
const translateXLoc = gl.getUniformLocation(programe, "translateX");
gl.uniform1f(translateXLoc,0.0);
const translateYLoc = gl.getUniformLocation(programe, "translateY");
gl.uniform1f(translateYLoc,0.0);
const angleLoc = gl.getUniformLocation(programe, "angle");
const rands = Math.PI * angleValue /180;
gl.uniform1f(angleLoc,rands);
angleValue+= 0.1;
/////////////////////---draw-----------------
GlUtil.clear(gl,0.1,0.1,0.2);
gl.drawArrays(gl.TRIANGLES , 0, 3);
requestAnimationFrame(draw);
}
draw();
---
Here is the GlUtil helper object
export default class GlUtil {
static getGl(canvasId :string ="bilza"):WebGLRenderingContext{
const canvas = document.getElementById(canvasId) as HTMLCanvasElement;
if (canvas == null){
throw new Error("canvas not found");
}
const gl = canvas.getContext("webgl");
if (gl == null) {
throw new Error("Unable to initialize WebGL. Your browser or machine may not support it.");
}
//---Got gl
return gl;
}
static getProgram(gl :WebGLRenderingContext,vshader:WebGLShader, fshader :WebGLShader) :WebGLProgram {
const pgm = gl.createProgram();
if (pgm == null){throw new Error("failed to create program");}
//-----------
gl.attachShader(pgm, vshader);
gl.attachShader(pgm, fshader);
//-------------
// pgm.vertexPosAttrib = gl.getAttribLocation( pgm , 'pos');
// this.gl.useProgram(this.program);
return pgm;
}
static getBuffer(gl :WebGLRenderingContext):WebGLBuffer{
let b = gl.createBuffer();
if (b == null){throw("failed to create buffer");}
return b;
}
static createShader(gl :WebGLRenderingContext, shaderSource :string, shaderType:number):WebGLShader {
var shader = gl.createShader(shaderType);
if (shader == null){
throw new Error("shaders could not be created");
}
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
let compiled = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if (!compiled) {
// There are errors, so display them
var errors = gl.getShaderInfoLog(shader);
console.log('Failed to compile with these errors:' + "type:" + shaderType, errors );
}
return shader;
}
static bindBuffer(gl :WebGLRenderingContext,buff :WebGLBuffer,buffData :number[]){
gl.bindBuffer(gl.ARRAY_BUFFER, buff);
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array(buffData),
gl.STATIC_DRAW);
}
static linkNuseProgram(gl :WebGLRenderingContext,prgrm :WebGLProgram){
gl.linkProgram(prgrm);
gl.useProgram(prgrm);
}
static clear(gl :WebGLRenderingContext,r:number=0,g:number=0,b:number=0,a:number=1){
gl.clearColor(r,g,b,a);
gl.clear(gl.COLOR_BUFFER_BIT);
}
////////////////////////////////////////////////////
static setAttribute(gl :WebGLRenderingContext,nameStr :string,programe :WebGLProgram,numberOfComps :number,stride:number, offset :number=0){
const vertexPosAttrib = gl.getAttribLocation( programe, `${nameStr}`);
gl.enableVertexAttribArray( vertexPosAttrib);
gl.vertexAttribPointer(
vertexPosAttrib, //index
numberOfComps, //number of components =2 x and y
gl.FLOAT, //data type
false, //normalized
stride, //stride - the complete vertex row in bytes
offset //offset = 0
);
}
///////////////////////////////////////////////
}
And finally, here is the picture of the triangle.
I have tried all the examples and help that I could find on the internet, including re-learning the math.
Since GL's drawing area is stretched to fit the viewport, you need to take the viewport's aspect ratio into account in the gl_Position calculation.
For example, adding an aspectRatio uniform to the vertex shader,
uniform float aspectRatio;
:
gl_Position = vec4(
(translateX + (a_pos.x * cos(angle) - a_pos.y * sin(angle))) / aspectRatio,
translateY + (a_pos.x * sin(angle) + a_pos.y * cos(angle)),
1.0,
1.0 );
then pass the viewport aspect ratio as follows:
const viewport = gl.getParameter(gl.VIEWPORT); // [x, y, width, height]
const aspectRatioLoc = gl.getUniformLocation(programe, "aspectRatio");
gl.uniform1f(aspectRatioLoc, viewport[2] / viewport[3]);

Support for Stencil Buffer with WebGL

Initializing WebGL with canvas.getContext("webgl", {stencil: true}) requests a stencil buffer, but not all browsers will actually give you one (for me, Firefox 79.0 on Ubuntu 20.04 LTS doesn't work but Chrome 84.0.4147.89 does; my graphics card is an NVIDIA RTX 2060 and I'm using the nvidia-driver-440-server driver).
I would like to know how widely supported stencil buffers are, but I can't find information about which browsers support them. Functions like glStencilOp, which are the only things I can find support information for, can still be used; they just don't do anything with 0 stencil bits.
Is there a list of browsers that support this feature?
Honestly that sounds like a bug in Firefox, although given that the spec lets an implementation fail to provide a stencil buffer on the canvas for any reason whatsoever, it's not technically a bug. I would consider filing one. Test with a Chromium browser just to check that this is Firefox choosing not to provide a stencil buffer and not a driver issue or something else.
You should be able to always make a DEPTH_STENCIL renderbuffer. There is no version of WebGL that allows an implementation to not support that. So, you can work around the bug by rendering to a texture + depth stencil renderbuffer attached to a framebuffer and then render the framebuffer color texture to the canvas.
Here's a test. You should see a red square with the bottom right corner green. That will be inside a blue square, which is inside a purple square.
The blue square is to show the extents of the framebuffer texture. If the green square was not being masked by the stencil buffer it would bleed into the blue.
The purple square is to show the size of the canvas and that we are drawing the framebuffer texture smaller than the full canvas. This is all just to show that stencil buffers work on your machine. For your own solution you'd want to draw a quad made out of vertices instead of using points like below, and you'd want to make the texture and renderbuffer attached to the frame buffer the same size as your canvas.
"use strict";
function main() {
const gl = document.querySelector("canvas").getContext("webgl");
const vs = `
attribute vec4 position;
void main() {
gl_Position = position;
gl_PointSize = 64.0;
}
`;
const fs = `
precision mediump float;
uniform sampler2D tex;
void main() {
gl_FragColor = texture2D(tex, gl_PointCoord.xy);
}
`;
const program = twgl.createProgram(gl, [vs, fs]);
const posLoc = gl.getAttribLocation(program, "position");
// Create a texture to render to
const targetTextureWidth = 128;
const targetTextureHeight = 128;
const targetTexture = createTexture(gl);
{
// define size and format of level 0
const level = 0;
const internalFormat = gl.RGBA;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
targetTextureWidth, targetTextureHeight, border,
format, type, data);
}
// Create and bind the framebuffer
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
// attach the texture as the first color attachment
const attachmentPoint = gl.COLOR_ATTACHMENT0;
const level = 0;
gl.framebufferTexture2D(gl.FRAMEBUFFER, attachmentPoint, gl.TEXTURE_2D, targetTexture, level);
// create a depth-stencil renderbuffer
const depthStencilBuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthStencilBuffer);
// make a depth-stencil buffer and the same size as the targetTexture
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_STENCIL, targetTextureWidth, targetTextureHeight);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_STENCIL_ATTACHMENT, gl.RENDERBUFFER, depthStencilBuffer);
function createTexture(gl, color) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
if (color) {
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0,
gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(color));
}
return tex;
}
// create a red texture and a green texture
const redTex = createTexture(gl, [255, 0, 0, 255]);
const greenTex = createTexture(gl, [0, 255, 0, 255]);
gl.enable(gl.STENCIL_TEST);
gl.useProgram(program);
gl.clearColor(0, 0, 1, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.bindTexture(gl.TEXTURE_2D, redTex);
gl.stencilFunc(
gl.ALWAYS, // the test
1, // reference value
0xFF, // mask
);
gl.stencilOp(
gl.KEEP, // what to do if the stencil test fails
gl.KEEP, // what to do if the depth test fails
gl.REPLACE, // what to do if both tests pass
);
// draw a 64x64 pixel red rect in middle
gl.drawArrays(gl.POINTS, 0, 1);
gl.stencilFunc(
gl.EQUAL, // the test
1, // reference value
0xFF, // mask
);
gl.stencilOp(
gl.KEEP, // what to do if the stencil test fails
gl.KEEP, // what to do if the depth test fails
gl.KEEP, // what to do if both tests pass
);
// draw a green 64x64 pixel square in the
// upper right corner. The stencil will make
// it not go outside the red square
gl.vertexAttrib2f(posLoc, 0.5, 0.5);
gl.bindTexture(gl.TEXTURE_2D, greenTex);
gl.drawArrays(gl.POINTS, 0, 1);
// draw the framebuffer's texture to
// the canvas. we should see a 32x32
// red square with the bottom right corner
// green showing the stencil worked. That will
// be surrounded by blue to show the texture
// we were rendering to is larger than the
// red square. And that will be surrounded
// by purple since we're drawing a 64x64
// point on a 128x128 canvas which we clear
// purple.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.clearColor(1, 0, 1, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.vertexAttrib2f(posLoc, 0.0, 0.0);
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
gl.drawArrays(gl.POINTS, 0, 1);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas width="128" height="128"></canvas>
If you change the renderbuffer format to DEPTH_COMPONENT16 and the attachment point to DEPTH_ATTACHMENT, then you'll see the green square is no longer masked by the stencil:
"use strict";
function main() {
const gl = document.querySelector("canvas").getContext("webgl");
const vs = `
attribute vec4 position;
void main() {
gl_Position = position;
gl_PointSize = 64.0;
}
`;
const fs = `
precision mediump float;
uniform sampler2D tex;
void main() {
gl_FragColor = texture2D(tex, gl_PointCoord.xy);
}
`;
const program = twgl.createProgram(gl, [vs, fs]);
const posLoc = gl.getAttribLocation(program, "position");
// Create a texture to render to
const targetTextureWidth = 128;
const targetTextureHeight = 128;
const targetTexture = createTexture(gl);
{
// define size and format of level 0
const level = 0;
const internalFormat = gl.RGBA;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
targetTextureWidth, targetTextureHeight, border,
format, type, data);
}
// Create and bind the framebuffer
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
// attach the texture as the first color attachment
const attachmentPoint = gl.COLOR_ATTACHMENT0;
const level = 0;
gl.framebufferTexture2D(gl.FRAMEBUFFER, attachmentPoint, gl.TEXTURE_2D, targetTexture, level);
// create a depth-stencil renderbuffer
const depthStencilBuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthStencilBuffer);
// make a depth-stencil buffer and the same size as the targetTexture
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, targetTextureWidth, targetTextureHeight);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, depthStencilBuffer);
function createTexture(gl, color) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
if (color) {
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0,
gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(color));
}
return tex;
}
// create a red texture and a green texture
const redTex = createTexture(gl, [255, 0, 0, 255]);
const greenTex = createTexture(gl, [0, 255, 0, 255]);
gl.enable(gl.STENCIL_TEST);
gl.useProgram(program);
gl.clearColor(0, 0, 1, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.bindTexture(gl.TEXTURE_2D, redTex);
gl.stencilFunc(
gl.ALWAYS, // the test
1, // reference value
0xFF, // mask
);
gl.stencilOp(
gl.KEEP, // what to do if the stencil test fails
gl.KEEP, // what to do if the depth test fails
gl.REPLACE, // what to do if both tests pass
);
// draw a 64x64 pixel red rect in middle
gl.drawArrays(gl.POINTS, 0, 1);
gl.stencilFunc(
gl.EQUAL, // the test
1, // reference value
0xFF, // mask
);
gl.stencilOp(
gl.KEEP, // what to do if the stencil test fails
gl.KEEP, // what to do if the depth test fails
gl.KEEP, // what to do if both tests pass
);
// draw a green 64x64 pixel square in the
// upper right corner. The stencil will make
// it not go outside the red square
gl.vertexAttrib2f(posLoc, 0.5, 0.5);
gl.bindTexture(gl.TEXTURE_2D, greenTex);
gl.drawArrays(gl.POINTS, 0, 1);
// draw the framebuffer's texture to
// the canvas. we should see a 32x32
// red square with the bottom right corner
// green showing the stencil worked. That will
// be surrounded by blue to show the texture
// we were rendering to is larger than the
// red square. And that will be surrounded
// by purple since we're drawing a 64x64
// point on a 128x128 canvas which we clear
// purple.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.clearColor(1, 0, 1, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.vertexAttrib2f(posLoc, 0.0, 0.0);
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
gl.drawArrays(gl.POINTS, 0, 1);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas width="128" height="128"></canvas>
You're supposed to be able to call gl.getContextAttributes to check whether you actually got a stencil buffer on the canvas, so you can fall back to the suggested workaround if it tells you that you didn't.
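A minimal sketch of that check (canvas here stands for your canvas element; the fallback branch is just a comment):
const gl = canvas.getContext('webgl', { stencil: true });
const attribs = gl.getContextAttributes();
if (!attribs.stencil) {
  // The canvas has no stencil buffer: fall back to rendering into a
  // framebuffer with a DEPTH_STENCIL renderbuffer, as shown above, and
  // then draw that framebuffer's color texture to the canvas.
}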

Why does setting the format in gl.texImage2D to gl.LUMINANCE instead of gl.RGB make the blob made out of the canvas only ~5% smaller in file size?

While exploring JavaScript, I encountered a question that is quite baffling. The preface: I convert images of different MIME types (mostly PNGs/JPGs) into bitmaps with the ImageBitmap interface, then transfer them to a worker to convert them into blobs in a separate thread (to do so I first draw them into an offscreen canvas context) and then save them into IDB, while the main thread continues to load new images. While doing so, to broaden my horizons, I decided to use a WebGL2 rendering context in the canvas, since GL is something I had never touched.
To apply the bitmap to the canvas I use the texImage2D function, which I don't seem to fully understand. There I can specify the format of the data stored in memory being presented to GLSL (it should be RGB, right? since the bitmap was created with no alpha premultiplying), the internal format, and the type. Since the valid combinations of format/internal format/type are specified by the spec, I tried to make use of their multitude and chose the best (quality-/filesize-wise) for my purposes. Since the images being converted to bitmaps are mostly black and white, I thought that luminance is what I need. But first I used the standard RGB format:
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGB, bitmap.width, bitmap.height, 0, gl.RGB, gl.UNSIGNED_BYTE, bitmap
);
Then I used RGB565 with the UNSIGNED_SHORT_5_6_5 data type and didn't see any quality loss, while the blob size decreased by ~30% compared to RGB. As I understand it, it decreased because RGB565 is one unsigned short (2 bytes) per pixel, right? Then I used UNSIGNED_SHORT_5_5_5_1 RGBA and the blob file size decreased by ~43% compared to standard RGB. Even less than RGB565! But gradients on images became wonky, so no 5551 RGBA for me. The big difference in size between 5551 RGBA and RGB565 is something I don't understand. And what is more confusing: when using luminance with the format/internal format/type combination from the spec, the decrease from standard RGB is only ~5%. Why did RGB565 decrease the size by a whopping ~30% while luma managed only a mere ~5%?
For all this I used the same floating-point sampler in the fragment shader:
#version 300 es
precision mediump float;
precision mediump sampler2D;
uniform sampler2D sampler;
uniform vec2 dimensions;
out vec4 color;
void main(){
color = texture(sampler, vec2(gl_FragCoord.x/dimensions.x, 1.0 - (gl_FragCoord.y/dimensions.y)));
}
Also the same pixelStorei and texParameteri:
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
As the snippet below shows, luma doesn't change the blob's file size if the image is black and white, while if it's colored the decrease is apparent, though still smaller than with RGBA4. Quite counterintuitive, considering RGBA4 has 2 bytes per pixel, while LUMA has 1.
(async() => {
function createImage(src) {
return new Promise((rs, rj) => {
var img = new Image();
img.crossOrigin = 'anonymous';
img.src = src;
img.onload = () => rs(img);
img.onerror = e => rj(e);
});
};
var jpeg = await createImage('https://upload.wikimedia.org/wikipedia/commons/a/aa/5inchHowitzerFiringGallipoli1915.jpeg');
var png = await createImage('https://upload.wikimedia.org/wikipedia/commons/2/2c/6.d%C3%ADl_html_m2fdede78.png');
var jpgClr = await createImage('https://upload.wikimedia.org/wikipedia/commons/thumb/e/ed/%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg/117px-%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg');
var format = {
standard: {
internalFormat: 'RGB8',
format: 'RGB',
type: 'UNSIGNED_BYTE',
},
rgb565: {
internalFormat: 'RGB565',
format: 'RGB',
type: 'UNSIGNED_SHORT_5_6_5',
},
rgb9e5: {
internalFormat: 'RGB9_E5',
format: 'RGB',
type: 'FLOAT',
},
srgb: {
internalFormat: 'SRGB8',
format: 'RGB',
type: 'UNSIGNED_BYTE',
},
rgba32f: {
internalFormat: 'RGB32F',
format: 'RGB',
type: 'FLOAT',
},
rgba4: {
internalFormat: 'RGBA4',
format: 'RGBA',
type: 'UNSIGNED_SHORT_4_4_4_4',
},
rgb5a1: {
internalFormat: 'RGB5_A1',
format: 'RGBA',
type: 'UNSIGNED_SHORT_5_5_5_1',
},
luma: {
internalFormat: 'LUMINANCE',
format: 'LUMINANCE',
type: 'UNSIGNED_BYTE',
},
};
function compareFormatSize(image) {
return new Promise((r, _) => {
createImageBitmap(image, {
premultiplyAlpha: 'none',
colorSpaceConversion: 'none',
}).then(async bitmap => {
var text = String(image.src.match(/(?<=\.)\w{3,4}$/)).toUpperCase();
console.log(`${text === 'JPG' ? 'Colored jpg' : text}:`);
for (let val of Object.values(format)) {
await logBlobSize(bitmap, val);
if(val.format === 'LUMINANCE') r();
}
}).catch(console.warn);
});
};
compareFormatSize(jpeg).then(_ => compareFormatSize(png)).then(_ => compareFormatSize(jpgClr));
function logBlobSize(bitmap, { internalFormat, format, type }) {
return new Promise(r => {
drawCanvas(bitmap, internalFormat, format, type).convertToBlob({
type: `image/webp`
}).then(blob => { console.log(`Blob from ${internalFormat} is ${blob.size}b`); r(); });
})
}
function drawCanvas(bitmap, internalFormat, format, type) {
const gl = (new OffscreenCanvas(bitmap.width, bitmap.height)).getContext("webgl2", {
antialias: false,
alpha: false,
depth: false,
});
function createShader(gl, type, glsl) {
const shader = gl.createShader(type);
gl.shaderSource(shader, glsl)
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return;
}
return shader;
}
const vs = createShader(
gl,
gl.VERTEX_SHADER,
`#version 300 es
#define POSITION_LOCATION 0
layout(location = POSITION_LOCATION) in vec2 position;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
}`,
);
const fs = createShader(
gl,
gl.FRAGMENT_SHADER,
`#version 300 es
precision mediump float;
precision mediump sampler2D;
uniform sampler2D sampler;
uniform vec2 dimensions;
out vec4 color;
void main()
{
color = texture(sampler, vec2(gl_FragCoord.x/dimensions.x, 1.0 - (gl_FragCoord.y/dimensions.y)));
}`,
);
const program = gl.createProgram();
gl.attachShader(program, vs);
gl.attachShader(program, fs);
gl.linkProgram(program);
const sampler = gl.getUniformLocation(program, 'sampler');
const dimensions = gl.getUniformLocation(program, 'dimensions');
const position = 0; // GLSL location
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(position);
const vxBuffer = gl.createBuffer();
const vertices = new Float32Array([
-1.0,-1.0,
1.0,-1.0,
-1.0, 1.0,
1.0, 1.0,
]);
gl.bindBuffer(gl.ARRAY_BUFFER, vxBuffer);
gl.vertexAttribPointer(position, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
const texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl[internalFormat],
bitmap.width,
bitmap.height,
0,
gl[format],
gl[type],
bitmap
);
gl.useProgram(program);
gl.uniform1i(sampler, 0);
gl.uniform2f(dimensions, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
gl.deleteTexture(texture);
gl.deleteVertexArray(vao);
gl.deleteBuffer(vxBuffer);
gl.deleteProgram(program);
return gl.canvas;
}
})()
Thanks in advance!
The canvas is always RGBA 8-bit (32-bit color). There is talk of adding options for a deeper bit-depth canvas to support HD color displays, but that hasn't shipped.
So, calling canvas.convertToBlob is always going to give you an RGBA 32-bit PNG (or JPEG). Creating a LUMINANCE texture will give you a black and white texture, but it gets drawn into an RGBA 32-bit canvas. There is no option to get a 1-channel PNG.
As for RGB565, RGBA5551, etc., those formats may or may not be supported directly by the hardware; the spec allows the driver to choose a higher-resolution format, and I'm guessing most desktops expand the data to RGBA8 when you upload it, so it won't save any memory.
On the other hand, when uploading as RGB565 or RGBA5551, the WebGL spec requires that the image first be converted to that format, so the browser is going to take your image and effectively quantize it down to those color depths, which means you're losing colors. You then draw the quantized image back to the canvas and save it, so of course it's likely to compress better, since there are more similar colors.
From the WebGL spec for the version of texImage2D that takes an ImageBitmap
The source image data is conceptually first converted to the data type and format specified by the format and type arguments, and then transferred to the WebGL implementation. Format conversion is performed according to the following table. If a packed pixel format is specified which would imply loss of bits of precision from the image data, this loss of precision must occur.
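Concretely, here's a tiny worked example of that precision loss (my numbers; roughly the same round trip the fn helpers in the snippet below perform):
// An 8-bit red value run through RGB565's 5-bit red channel and back:
const r = 200;                           // original 8-bit value
const r5 = r >> 3;                       // quantized to 5 bits -> 25
const back = Math.round(r5 * 255 / 31);  // expanded back to 8 bits -> 206
// Every value from 200 to 207 collapses to the same result, so neighboring
// colors merge, which is why the quantized image compresses better.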
Let's try it without WebGL
(async() => {
function createImage(src) {
return new Promise((rs, rj) => {
const img = new Image();
img.crossOrigin = 'anonymous';
img.src = src;
img.onload = () => rs(img);
img.onerror = rj;
});
};
const jpeg = await createImage('https://upload.wikimedia.org/wikipedia/commons/a/aa/5inchHowitzerFiringGallipoli1915.jpeg');
const png = await createImage('https://upload.wikimedia.org/wikipedia/commons/2/2c/6.d%C3%ADl_html_m2fdede78.png');
const jpgClr = await createImage('https://upload.wikimedia.org/wikipedia/commons/thumb/e/ed/%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg/117px-%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg');
const format = {
standard: {
internalFormat: 'RGB8',
format: 'RGB',
type: 'UNSIGNED_BYTE',
fn: p => [p[0], p[1], p[2], 255],
},
rgb565: {
internalFormat: 'RGB565',
format: 'RGB',
type: 'UNSIGNED_SHORT_5_6_5',
fn: p => [
(p[0] >> 3) * 255 / 31,
(p[1] >> 2) * 255 / 63,
(p[2] >> 3) * 255 / 31,
255,
],
},
rgba4: {
internalFormat: 'RGBA4',
format: 'RGBA',
type: 'UNSIGNED_SHORT_4_4_4_4',
fn: p => [
(p[0] >> 4) * 255 / 15,
(p[1] >> 4) * 255 / 15,
(p[2] >> 4) * 255 / 15,
(p[3] >> 4) * 255 / 15,
],
},
rgb5a1: {
internalFormat: 'RGB5_A1',
format: 'RGBA',
type: 'UNSIGNED_SHORT_5_5_5_1',
fn: p => [
(p[0] >> 3) * 255 / 31,
(p[1] >> 3) * 255 / 31,
(p[2] >> 3) * 255 / 31,
(p[3] >> 7) * 255 / 1,
],
},
luma: {
internalFormat: 'LUMINANCE',
format: 'LUMINANCE',
type: 'UNSIGNED_BYTE',
fn: p => [p[0], p[0], p[0], 255],
},
};
async function compareFormatSize(image) {
const bitmap = await createImageBitmap(image, {
premultiplyAlpha: 'none',
colorSpaceConversion: 'none',
});
const text = String(image.src.match(/(?<=\.)\w{3,4}$/)).toUpperCase();
log(`${text === 'JPG' ? 'Colored jpg' : text}:`);
for (const val of Object.values(format)) {
await logBlobSize(bitmap, val);
}
};
await compareFormatSize(jpeg);
await compareFormatSize(png);
await compareFormatSize(jpgClr);
async function logBlobSize(bitmap, {
internalFormat,
format,
type,
fn,
}) {
const canvas = drawCanvas(bitmap, internalFormat, format, type);
const blob = await canvas.convertToBlob({
type: `image/webp`
});
const canvas2 = drawFn(bitmap, fn);
const blob2 = await canvas2.convertToBlob({
type: `image/webp`
});
log(`Blob from ${internalFormat} is ${blob.size}b(webgl) vs ${blob2.size}b(code)`);
if (false) {
const img = new Image();
img.src = URL.createObjectURL(blob);
document.body.appendChild(img);
const img2 = new Image();
img2.src = URL.createObjectURL(blob2);
document.body.appendChild(img2);
}
}
function drawFn(bitmap, fn) {
const ctx = (new OffscreenCanvas(bitmap.width, bitmap.height)).getContext("2d");
ctx.drawImage(bitmap, 0, 0);
const imageData = ctx.getImageData(0, 0, bitmap.width, bitmap.height);
const pixels = imageData.data;
for (let i = 0; i < pixels.length; i += 4) {
const n = fn(pixels.subarray(i, i + 4));
pixels.set(n, i);
}
ctx.putImageData(imageData, 0, 0);
return ctx.canvas;
}
function drawCanvas(bitmap, internalFormat, format, type) {
const gl = (new OffscreenCanvas(bitmap.width, bitmap.height)).getContext("webgl2", {
antialias: false,
alpha: false,
depth: false,
});
function createShader(gl, type, glsl) {
const shader = gl.createShader(type);
gl.shaderSource(shader, glsl)
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return;
}
return shader;
}
const vs = createShader(
gl,
gl.VERTEX_SHADER,
`#version 300 es
#define POSITION_LOCATION 0
layout(location = POSITION_LOCATION) in vec2 position;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
}`,
);
const fs = createShader(
gl,
gl.FRAGMENT_SHADER,
`#version 300 es
precision mediump float;
precision mediump sampler2D;
uniform sampler2D sampler;
uniform vec2 dimensions;
out vec4 color;
void main()
{
color = texture(sampler, vec2(gl_FragCoord.x/dimensions.x, 1.0 - (gl_FragCoord.y/dimensions.y)));
}`,
);
const program = gl.createProgram();
gl.attachShader(program, vs);
gl.attachShader(program, fs);
gl.linkProgram(program);
const sampler = gl.getUniformLocation(program, 'sampler');
const dimensions = gl.getUniformLocation(program, 'dimensions');
const position = 0; // GLSL location
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(position);
const vxBuffer = gl.createBuffer();
const vertices = new Float32Array([-1.0, -1.0,
1.0, -1.0, -1.0, 1.0,
1.0, 1.0,
]);
gl.bindBuffer(gl.ARRAY_BUFFER, vxBuffer);
gl.vertexAttribPointer(position, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
const texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl[internalFormat],
bitmap.width,
bitmap.height,
0,
gl[format],
gl[type],
bitmap
);
gl.useProgram(program);
gl.uniform1i(sampler, 0);
gl.uniform2f(dimensions, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
gl.deleteTexture(texture);
gl.deleteVertexArray(vao);
gl.deleteBuffer(vxBuffer);
gl.deleteProgram(program);
return gl.canvas;
}
})()
function log(...args) {
const elem = document.createElement('pre');
elem.textContent = [...args].join(' ');
document.body.appendChild(elem);
}
pre { margin: 0; }
Why does setting the format in gl.texImage2D to gl.LUMINANCE instead of gl.RGB make the blob made out of the canvas only ~5% smaller in file size?
I'm not seeing these results. In your example the black and white images stay the same size for RGB vs LUMINANCE. The color image becomes half the size. But of course it depends on the compression algorithm whether or not a black and white 32-bit image gets compressed smaller than a color 32-bit image, since in all cases the canvas is 32 bits when convertToBlob is called.

Unable to load texture in WebGL

I am able to load RGB colours but not textures. If it could be some settings problem, please point it out to me.
This is the screenshot of chrome://flags:
The HTML code is given:
<!DOCTYPE html>
<meta charset="UTF-8">
<html>
<head>
<title>WebGL Cube with Texture</title>
<script type="x-shader/x-vertex" id="vshader">
attribute vec3 coords;
attribute vec2 texCoords;
uniform vec3 normal;
uniform mat4 modelview;
uniform mat4 projection;
uniform mat3 normalMatrix;
varying vec3 vNormal;
varying vec2 vTexCoords;
void main() {
vec4 coords = vec4(coords,1.0);
vec4 transformedVertex = modelview * coords;
vNormal = normalMatrix * normal;
vTexCoords = texCoords;
gl_Position = projection * transformedVertex;
}
</script>
<script type="x-shader/x-fragment" id="fshader">
precision mediump float;
uniform bool textured;
uniform sampler2D sampler;
varying vec3 vNormal;
varying vec2 vTexCoords;
uniform vec4 color;
void main() {
if (textured) {
vec4 color = texture2D(sampler, vTexCoords);
vec3 unitNormal = normalize(vNormal);
float multiplier = abs(unitNormal.z);
gl_FragColor = vec4( multiplier*color.r, multiplier*color.g, multiplier*color.b, color.a );
}
else {
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0); // use basic white when texture's not there.
}
}
</script>
<script type="text/javascript" src="gl-matrix-min.js"></script>
<script type="text/javascript" src="simple-rotator.js"></script>
<script type="text/javascript">
"use strict";
var gl; // The webgl context.
var aCoords; // Location of the coords attribute variable in the shader program.
var aCoordsBuffer; // Buffer to hold coords.
var aTexCoords; // Location of the texCoords attribute variable in the shader program.
var aTexCoordsBuffer; // Buffer to hold texCoords.
var uProjection; // Location of the projection uniform matrix in the shader program.
var uModelview; // Location of the modelview uniform matrix in the shader program.
var uNormal; // Location of the normal uniform in the shader program.
var uColor; // Location of the color uniform in the shader program, used only for axes.
var uTextured; // Location of the textured uniform in the shader program.
var uSampler; // Location of the sampler in the shader program.
var uNormalMatrix; // Location of the normalMatrix uniform matrix in the shader program.
var projection = mat4.create(); // projection matrix
var modelview = mat4.create(); // modelview matrix
var normalMatrix = mat3.create(); // matrix, derived from modelview matrix, for transforming normal vectors
var rotator; // A SimpleRotator object to enable rotation by mouse dragging.
var textureID = null; // Texture object, to be created after image has loaded.
/* Draws a colored cube, along with a set of coordinate axes.
* (Note that the use of the above drawPrimitive function is not an efficient
* way to draw with WebGL. Here, the geometry is so simple that it doesn't matter.)
*/
function draw() {
gl.clearColor(0,0,0,1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (document.getElementById("persproj").checked) {
mat4.perspective(projection, Math.PI/4, 1, 2, 10);
}
else {
mat4.ortho(projection,-2.5, 2.5, -2.5, 2.5, 2, 10);
}
gl.uniformMatrix4fv(uProjection, false, projection );
var modelview = rotator.getViewMatrix();
var saveModelview = mat4.clone(modelview);
if (textureID) {
gl.uniform1i( uTextured, 1 ); // Tell shader to use texture and lighting.
gl.bindTexture(gl.TEXTURE_2D, textureID); // Which texture should be used.
gl.uniform1i(uSampler, 0); // Set sampler in shader to use texture unit zero.
}
else {
gl.uniform1i( uTextured, 0 ); // Cube will appear in plain white.
}
drawFace(modelview) // front face of the cube
mat4.rotateY(modelview,modelview,Math.PI/2); //right face
drawFace(modelview) // front face
mat4.rotateY(modelview,modelview,Math.PI/2); //back face
drawFace(modelview) // front face
mat4.rotateY(modelview,modelview,Math.PI/2); //left face
drawFace(modelview) // front face
modelview = mat4.clone(saveModelview);
mat4.rotateX(modelview,modelview,Math.PI/2);
drawFace(modelview) // top face
mat4.rotateX(modelview,modelview,Math.PI);
drawFace(modelview) // bottom face
}
/**
* Draws the front face of the cube, subject to a modelview transform.
*/
function drawFace(modelview) {
gl.uniformMatrix4fv(uModelview, false, modelview );
mat3.normalFromMat4(normalMatrix, modelview);
gl.uniformMatrix3fv(uNormalMatrix, false, normalMatrix);
gl.uniform3f(uNormal, 0, 0, 1);
gl.drawArrays(gl.TRIANGLE_FAN, 0, 4); // front face
}
/**
* Loads data for the front face of the cube into VBOs.
*/
function createFace() {
var vertices = [ -1,-1,1, 1,-1,1, 1,1,1, -1,1,1 ];
var texCoords = [ 0,0, 2,0, 2,2, 0,2 ];
gl.enableVertexAttribArray(aCoords);
gl.bindBuffer(gl.ARRAY_BUFFER,aCoordsBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
gl.vertexAttribPointer(aCoords, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(aTexCoords);
gl.bindBuffer(gl.ARRAY_BUFFER,aTexCoordsBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW);
gl.vertexAttribPointer(aTexCoords, 2, gl.FLOAT, false, 0, 0);
}
/**
* Load an image from the URL "textures/bridk001.jpg". The image is loaded
* asynchronously. When the image has loaded, the texture is created and the scene is redrawn.
*/
function loadTexture() {
var img = new Image();
img.onload = function() {
var id = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D,id);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
gl.generateMipmap(gl.TEXTURE_2D);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT);
textureID = id;
draw();
}
img.src = "./skin.jpg";
}
/* Creates a program for use in the WebGL context gl, and returns the
* identifier for that program. If an error occurs while compiling or
* linking the program, an exception of type String is thrown. The error
* string contains the compilation or linking error. If no error occurs,
* the program identifier is the return value of the function.
*/
function createProgram(gl, vertexShaderSource, fragmentShaderSource) {
var vsh = gl.createShader( gl.VERTEX_SHADER );
gl.shaderSource(vsh,vertexShaderSource);
gl.compileShader(vsh);
if ( ! gl.getShaderParameter(vsh, gl.COMPILE_STATUS) ) {
throw "Error in vertex shader: " + gl.getShaderInfoLog(vsh);
}
var fsh = gl.createShader( gl.FRAGMENT_SHADER );
gl.shaderSource(fsh, fragmentShaderSource);
gl.compileShader(fsh);
if ( ! gl.getShaderParameter(fsh, gl.COMPILE_STATUS) ) {
throw "Error in fragment shader: " + gl.getShaderInfoLog(fsh);
}
var prog = gl.createProgram();
gl.attachShader(prog,vsh);
gl.attachShader(prog, fsh);
gl.linkProgram(prog);
if ( ! gl.getProgramParameter( prog, gl.LINK_STATUS) ) {
throw "Link error in program: " + gl.getProgramInfoLog(prog);
}
return prog;
}
/* Gets the text content of an HTML element. This is used
* to get the shader source from the script elements that contain
* it. The parameter should be the id of the script element.
*/
function getTextContent( elementID ) {
var element = document.getElementById(elementID);
var fsource = "";
var node = element.firstChild;
var str = "";
while (node) {
if (node.nodeType == 3) // this is a text node
str += node.textContent;
node = node.nextSibling;
}
return str;
}
/**
* Initializes the WebGL program including the relevant global variables
* and the WebGL state. Creates a SimpleView3D object for viewing the
* cube and installs a mouse handler that lets the user rotate the cube.
*/
function init() {
try {
var canvas = document.getElementById("glcanvas");
gl = canvas.getContext("webgl");
if ( ! gl ) {
gl = canvas.getContext("experimental-webgl");
}
if ( ! gl ) {
throw "Could not create WebGL context.";
}
var vertexShaderSource = getTextContent("vshader");
var fragmentShaderSource = getTextContent("fshader");
var prog = createProgram(gl,vertexShaderSource,fragmentShaderSource);
gl.useProgram(prog);
aCoords = gl.getAttribLocation(prog, "coords");
aTexCoords = gl.getAttribLocation(prog, "texCoords");
uModelview = gl.getUniformLocation(prog, "modelview");
uProjection = gl.getUniformLocation(prog, "projection");
uSampler = gl.getUniformLocation(prog, "sampler");
uNormal = gl.getUniformLocation(prog, "normal");
uColor = gl.getUniformLocation(prog, "color");
uTextured = gl.getUniformLocation(prog, "textured");
uNormalMatrix = gl.getUniformLocation(prog, "normalMatrix");
aCoordsBuffer = gl.createBuffer();
aTexCoordsBuffer = gl.createBuffer();
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE); // no need to draw back faces
document.getElementById("persproj").checked = true;
rotator = new SimpleRotator(canvas,draw);
rotator.setView( [2,2,5], [0,1,0], 6 );
}
catch (e) {
document.getElementById("message").innerHTML =
"Could not initialize WebGL: " + e;
return;
}
createFace();
loadTexture();
draw();
}
</script>
</head>
<body onload="init()" style="background-color:#DDD">
<h2>A Cube With a Brick Texture</h2>
<p id=message>Drag the mouse on the canvas to rotate the view.</p>
<p>
<input type="radio" name="projectionType" id="persproj" value="perspective" onchange="draw()">
<label for="persproj">Perspective projection</label>
<input type="radio" name="projectionType" id="orthproj" value="orthogonal" onchange="draw()" style="margin-left:1cm">
<label for="orthproj">Orthogonal projection</label>
<button onclick="rotator.setView( [2,2,5], [0,1,0], 6 ); draw()" style="margin-left:1cm">Reset View</button>
</p>
<noscript><hr><h3>This page requires Javascript and a web browser that supports WebGL</h3><hr></noscript>
<div>
<canvas width=600 height=600 id="glcanvas" style="background-color:red"></canvas>
</div>
</body>
</html>
All I get as output is:
The other functions are loading fine. The file paths are correct.
The issue is you need to run a simple web server for WebGL dev. It should take you about 2 minutes to get set up.
See this.
Try defining the minification and magnification parameters for the texture object, e.g.:
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
Use the appropriate values for the min and mag filters, based on your project requirements.

WebGL Texture resize unexpected output

When using textures in WebGL, sometimes I need to make them larger than they were originally. When I do that, the textures appear different, especially on lighter backgrounds.
I have the following image (256 x 256):
When rendered in WebGL, it is slightly larger than the original image. Here is how the image appears on two different backgrounds:
As you can see, the image appears correctly on the dark background, but when on the light background, has a white outline.
My setup code:
gl.clearColor(0x22 / 0xFF, 0x22 / 0xFF, 0x22 / 0xFF, 1); // set background color
gl.enable(gl.BLEND); // enable transparency
gl.disable(gl.DEPTH_TEST); // disable depth test (causes problems with alpha if enabled)
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA); //set up blending
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); //clear the gl canvas
gl.viewport(0, 0, canvas.width, canvas.height); //set the viewport
And this is the code called every time a texture is loaded:
function handleTextureLoaded(image, texture) {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
gl.generateMipmap(gl.TEXTURE_2D);
gl.bindTexture(gl.TEXTURE_2D, null);
loadCount++;
}
What is causing the outline to appear, and how do I fix it?
NOTE: When I put the original image on these same two backgrounds, this problem does not occur, even when I resize the image.
I tried disabling the alpha on the WebGL context (as suggested by @zfedoran):
gl = canvas.getContext('webgl', {antialias: false, alpha: false })
|| canvas.getContext('experimental-webgl', {antialias: false, alpha: false });
And a small blank border now appears around the image, like this (enlarged):
On top of the canvas's alpha issue mentioned by @zfedoran, how did you make the original image?
I believe the issue is as follows
Let's say you have an anti-aliased edge like this. What color is this pixel?
Assume the main color, the color of the pixels in the bottom right, was 1,0,0 (pure red). Ideally the pixel pointed to by the arrow would be (1,0,0,0.5). In other words, pure red with an alpha of 0.5. But, depending on how the image was created, that anti-aliased pixel might have been blended with the purely transparent pixels next to it, so it is no longer pure red. Those purely transparent pixels are likely (0,0,0,0), which is transparent black.
Even if your drawing program handles this correctly, GL likely does not. When you draw an image with texture filtering on (gl.LINEAR etc.), GL averages neighboring pixels, and some of those pixels are transparent black. Blending black with red gives dark red, hence the dark border.
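A rough back-of-the-envelope version of that (not the exact GPU math, just to show where the dark fringe comes from):
// linear filtering averages an edge texel with its transparent-black neighbour
var edge     = [1, 0, 0, 0.5];  // red, half covered
var neighbor = [0, 0, 0, 0];    // transparent black
var sampled  = edge.map(function(v, i) { return (v + neighbor[i]) / 2; }); // [0.5, 0, 0, 0.25]

// SRC_ALPHA / ONE_MINUS_SRC_ALPHA blend of that sample onto a white background
var a   = sampled[3];
var out = [1, 1, 1].map(function(d, i) { return sampled[i] * a + d * (1 - a); });
// out = [0.875, 0.75, 0.75] -- darker than the ideal pale red [1, 0.75, 0.75],
// which is exactly the dark outline you see on light backgrounds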
Here you can see the issue
"use strict";
function main() {
var planeVertices = [
-1, -1,
1, -1,
-1, 1,
1, 1,
];
var texcoords = [
0, 1,
1, 1,
0, 0,
1, 0,
];
var indices = [
0, 1, 2,
2, 1, 3,
];
var canvas = document.getElementById("c");
var gl = canvas.getContext("webgl", {alpha:false});
var programs = {}
programs.normalProgram = twgl.createProgramFromScripts(
gl, ["2d-vertex-shader", "2d-fragment-shader"], ["a_position", "a_texcoord"]);
programs.preMultiplyAlphaProgram = twgl.createProgramFromScripts(
gl, ["2d-vertex-shader", "pre-2d-fragment-shader"], ["a_position", "a_texcoord"]);
var positionLoc = 0; // assigned in createProgramFromScripts
var texcoordLoc = 1; // assigned in createProgramFromScripts
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array(planeVertices),
gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array(texcoords),
gl.STATIC_DRAW);
gl.enableVertexAttribArray(texcoordLoc);
gl.vertexAttribPointer(texcoordLoc, 2, gl.FLOAT, false, 0, 0);
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer);
gl.bufferData(
gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array(indices),
gl.STATIC_DRAW);
var img = new Image();
img.onload = createTextures;
img.src = document.getElementById("i").text;
function createTexture() {
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
gl.generateMipmap(gl.TEXTURE_2D); // assuming power-of-2
return tex;
}
var textures = {};
function createTextures() {
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
textures.unpremultipliedAlphaTexture = createTexture();
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
textures.premultipliedAlphaTexture = createTexture();
document.body.appendChild(document.createElement("hr"));
insert("original image");
document.body.appendChild(img);
render();
}
function insert(text) {
var pre = document.createElement("pre");
pre.appendChild(document.createTextNode(text));
document.body.appendChild(pre);
};
function grabImage(prg, blend, texName) {
document.body.appendChild(document.createElement("hr"));
insert(
"gl.useProgram(" + prg + ")\n" +
"gl.blendFunc(gl." + blend.src + ", gl." + blend.dst + ")\n" +
"gl.bindTexture(gl.TEXTURE2D, " + texName + ")");
var img = new Image();
img.src = gl.canvas.toDataURL();
document.body.appendChild(img);
};
function render() {
gl.enable(gl.BLEND);
Object.keys(programs).forEach(function(p, pndx) {
gl.useProgram(programs[p]);
[
{ src: "SRC_ALPHA", dst: "ONE_MINUS_SRC_ALPHA" },
{ src: "ONE", dst: "ONE_MINUS_SRC_ALPHA" },
].forEach(function(b, bndx) {
gl.blendFunc(gl[b.src], gl[b.dst]);
Object.keys(textures).forEach(function(texName, tndx) {
gl.bindTexture(gl.TEXTURE_2D, textures[texName]);
gl.clearColor(0x3D/0xFF, 0x87/0xFF, 0xEA/0xFF, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
grabImage(p, b, texName);
});
});
});
}
}
main();
canvas {
border: 1px solid black;
display: none;
}
img {
background-color: #3D87EA;
border: 1px solid black;
width: 256px;
height: 256px;
}
<script src="https://twgljs.org/dist/3.x/twgl.min.js"></script>
<!-- vertex shader -->
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
attribute vec2 a_texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = a_position;
v_texcoord = a_texcoord;
}
</script>
<!-- fragment shaders -->
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, v_texcoord);
}
</script>
<script id="pre-2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_texture;
void main() {
vec4 textureColor = texture2D(u_texture, v_texcoord);
gl_FragColor = vec4(textureColor.rgb * textureColor.a, textureColor.a);
}
</script>
<canvas id="c" width="32" height="32"></canvas>
<script type="not-js" id="i">data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAYAAABccqhmAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA7dpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wUmlnaHRzPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvcmlnaHRzLyIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcFJpZ2h0czpNYXJrZWQ9IkZhbHNlIiB4bXBNTTpPcmlnaW5hbERvY3VtZW50SUQ9InhtcC5kaWQ6RjgxNENDMDEzQjNGNjgxMTgyMkFCRTQ0RTFGNjIxOTciIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6Q0VDQTU5RDNGNjBEMTFFMjhFRUVEMkI5NkRDNTM4RDYiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6Q0VDQTU5RDJGNjBEMTFFMjhFRUVEMkI5NkRDNTM4RDYiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkRGODJCQjcxQkIyNDY4MTE4MjJBRkMwRDVCNTc4NTk3IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkY4MTRDQzAxM0IzRjY4MTE4MjJBQkU0NEUxRjYyMTk3Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+7i5UrAAAEShJREFUeNrsnQmQFcUZx2cVlVslFeKKIAYlWtbqeoEiiiKoFTVIQC7PBJUkSgxYiBfGVBCBEBOC0ZhEK4emSErBijEeiVhRVEQUqcXyQGC9o3J5BhTZ/D/fEDe6y5uZ995M97zfr+qrVvb1e/P69ffv7+uZ7q5pamoKAKA62Y4mAEAAAAABAAAEAAAQAABAAAAAAQAABAAAEAAAQAAAAAEAAAQAABAAAEAAAAABAAAEAAAQAABAAAAAAQAABAAAEAAAQAAAAAEAAAQAABAAAEAAAAABAAAEAAAQAABAAAAAAQAABAAAEAAAaJ02Ll5UTU0Nv0yFaGhosMbdW1Yv21+2j6yHrFbWRbbr56q8I/tQtkHWKFsd2jOyx+vq6tbRqtFoampyz9ecvCgEoNxOb05+kuxY2VEtOHkprJAtkt0r+7sEYQMtjgAgANk7/VdVnCEbJdsvpY/dLHtIdrvsNonBu/wSCAACkJ7T25zON2QXyAZlfDmWNsyV3SghWMKvgwAgAJVz/DbhaH+ZrLeDl3if7GoJwSIEAAFAAMrr/N9UMT0oTOa5jgnBZAnBMgQAAUAASnP8r6m4QTbQs0v/JLzuKRKCdxAABAABiOf429soKrtKtpPHX+XfsnESgb8iAAgAAhDN+XuquE3WL0df60bZRAnBRgQAAUAAWnf+r6u4NSjvPXxXsIeKhksEnkMA0oVHgf1w/ktU3JVT5zfsicRF+p4n8msTARAB/H++P0f23Sr5yjZBeL4igVuIAIgAcP4g+EMVOb9h3/lmffcJ9AAEAOcPgjFV2gTXqQ0upieQAlSj89uXv1n2LbpncJ7Sgd+SAhABVBNTcf7/cZME8WSagQigWkb/04PCrT74jPdlRygSWE4EgADk2fkPUPG4rC0+/wVWyuolAu8jAKQAeXT+jkFhDT3O3zK9gsL6AUAAcsnPAj9W82XJmRLKMTQDKUDeRv/BKu6nK0bCdhjqrVTgTVIAIoA8OH87FTfREpHpLJtFMyAAeWGSbC+aIRZnSDgH0AykAL6P/rYNt81ut6MbxuZp2cFKBZp8umhSAGjOlTh/YuxMgyE0AxGAr6N/t6BwuMYOdMHqiQKIAGArE3D+skQBp9AMCIBvo38HFefSEmXhQpoAAfCN0bKdaYayMCg8AQkQAG9gpV/5sMmicTQDAuBL+G+jVT9aoqyMogmS04YmSJWhNEHZ6SFhPaiurm5pC4JrOyt1C20PWYdmf94keyMonE+wWvU3IQBQabh3XRlOlS2Vw9vcip2U1F92mOwQWfsI9Terrm1N/pTsAdk9EoR1VZFD8RxAauG/Lfldj+hWhNdlL8n6BIWNRUtli+xhme1O/JdyHVrChiDVLQB2sMfd+Kp3mGjbgq1ZEoK1eRMAJgHT4wiawEvsMJZLZSsl4lfJ2ufpyyEA6dGHJvAam1/4kWxZnlYjkgLED+XtEd462UGyHrLdZF3DvPG90F4LCs/6P6GwcVVYz/5td/woN8yWTdLv+zFzADkXADmvOe5pspNkRwfxjuW22WSbUOIOQP54SDYi6g5FCIBnAhCGej8ICotOtqe/Qwu8IhssEXgeAciJAMjxLV+fLjuW/g0RWCMbKBFoQAA8FoDwXv20oLDKjCOKIQ6WBvTbOueDAHgmAOHBHPNlrC6DpLwg6ysR2OCLAHAbsOD8NsH3GM4PJdJbdmt4wKsXbIfzN5yvYm4Q7ZlxgGLYnaLv+HKxVZ0CyPltZ57f0GehzHwosxWKL5ACuDvy24k8N9JXoQJYNDmbCMDRCEDOb4dx2PpxtuaCSnKCooD7iQDcGvntgZ7f4/yQAjNdnxCsxhRgvOwo+iakwIGy4xAAd0b/Lip+SL+EFLkIAXCHK2S70CchRU7SwNMDAch+9Lejpc+nP0LK2BzAEAQge8bKOtIfIQOGIQDZczb9EDLiKEWgnRCA7ML/XkFhRhYgKz/rgwBkx8n0QciYwxCA7OA4LsiaegQgO/rS/yBj9kQAssn/27ra+FBV7IEAZEMP+h44QC0CkA270ffAAbZHAPiOADhHynTgZwYHaEIAsuFd+h44wBoEIBvW0/fAAd528aLa5LGlw5V/dqqP7ft3An0PHOBlBKByDm/fo2/o8IPD
/+YsP3CJ5QhAeZ3eDmE4PnT4Y2Sd6WPgMEsRgNKd3u7pf092esApPuAXixGA5I5v4fwk2ZSAE3zAP1bW1dW96OK24G08cH7bvnuebCD9CDzlblcvrI3jzm9beNnBCn3oQ+Axt7t6Ya4/B3Azzg+eY+cDLkQA4o/+Q1WMoP+A5/xa+X8TAhDP+e26ptF3wHPWBo6fPu1qBGDHKe1L/wHPmaXR/10EID7D6DvgOW/Irnf9Il0VADbxBN+5UKP/+whAMvam/4DHzJfzz/PhQl0VgHb0IfCUV2XjfLlYVwVgC/0IPOQj2TCN/m8jAKXxOn0JPGScnH+xTxfsqgAspS+BZ0yW8//Ot4t2VQDupz+BR0yS88/08cJdFYC5sk30K/Ag5z9Lzj/L1y/gpACoQW0H1V/Rv8BhGmUD1Ff/6POXcHk14NVB4WkqANcwpz9Qzr/I9y9S4+IuJTU1NZ+WDQ0N/VUskO1AnwMHeE42UY5/T5LKLvqa0/sBqKFtHfVw5gMgY2xL7/GFLpnM+YkAEkQAW1EkcJiKPwU8Igzp8ojsF7J5cvzNpb6Zk77mgwCEImCbgV4ku0DWjb4JFcCW7j4pmy+7U07/SjnfHAEoQQCaCYHtEDxIdrSsPigc/tkp4CAQiMaGoLBRx7qgcFyXhfevhPn9qkru3oMAlEEAAHyFSUAAQAAAAAEAAAQAABAAAEAAAAABAIAUaEMTpMtbjZ8eb753yuL7QdeewQpaHz4PDwKl6/xfUtEgq83g4+dIBL7vc/uNf/QNe9rzYNn6Of1qX/Tt+nkQCEZk5PxGJ8+dv7OKxaGt0P9fR3dCAHxjTEaf2yib6HnbzQhH/61MkAjsQ5dCAHwJ/3uo6J/BR38sG6nwf73Ho3+divNa+FMXehYC4AujMvrcS+T8iz1vu58HX1ztuVG2jG6FAPjC6Aw+807ZbM9z/1NVDGzhTwvn9KvdSLdCAHwI//cNCnsXpJ33f1ujf5PHzr+jita23P4HPQsB8IW0J/9sv/rhPuf9IbYDVK9W/vZPuhUCQP7fMhfL+Z/0PPT/iooprfzZdvR5mm6FAPgQ/h+qIs3bVXfI+a/PQdNNDVp/dmGB8n9OkEYAvCDNyb9VsrG+N5hG//oi34P8HwHwYvTfLsXw3/L+ERr938lB09ltv209D07+jwB4ge1cvDt5f6zR/zQVA7bxktUK/1fTtRAAH0hr9M9F3i/nb6ui2DHbjP4IgBfhv51nOIK8PxYTZD2LvIb8HwHwghNku5L3Rx79bZXk5UVeZg81LaBrIQA+kMbsfy7y/pBrZR2LvGap8v+1dC0EwPXwv52KIeT9kUd/e1birAgvJf9HALzAnL8DeX8k57fbfcVu+5H/Vwj2BPQz/LetxZYp0ijG+jBNcDlvHik7MsLrNskW0rUQANfD/11UnFjhj9k5tGLsGRQWIjkpABr9LVWaGfHlLP8lBfCCYbIdHbkWOwr7GofbapKse8TXkv8jAF4wxqFrsf0AnHxqTqN/NxWTY1RBABAA58N/u5d9jCOXM1vOP9/h5pouax/xtTaX8RQ9DAFwnRGOtOkTsktcbSSN/oerOCNGFZb/IgBeMNqBa7C833YB/shR59962y8O3P5DAJwP/23rqr7k/UUZk6CdyP8RAOcZSd5fdPS3h6NmxKzWqPB/Jd0LAXCdrGf/nc77Q2zWvxujPwKQt/DfTq7Zn7x/m6O/nYw0KUFV8n8EgNHf87w/CEP/tjHrsPwXAXB+9LdZ7VEZXoLr9/tt9D8yYRstU/6/hl6GALjMAUHxXWwqxaKEYXWazm99LOnxZOT/FYbFQKWzKsxT+0R8va0TaFeGz10f5v0fO94+ts7/kIR1yf8RALeRA76n4vgYKYNt4nFBGT76bH32y46P/rbDz7SE1Vn+SwqQu/kCO+K6HBuF/lTOf5cHX/kKWW3Cuo8q//+QXkMEkCcGyb5chrz/shRGb5vctP35uwSFZ/E3xKy/V1DY5Tcp5P9EALmj1LUCaeb9t8gelN0hWy6H7h6zvm30sVMJn0/+jwDkKvw3ZxjqQ94vZz9VxTnN/sme3jsvRn07EWl4iULH8l8EIFecLOvset4v57W7FLNa+FP7GG8zs8TLeFApxyd0GQQgT5TysFAqeX/IRbJeLfx7pBn58GTfUldFkv8jALkK/zuHEYDTeb+ct6uKK1v405ZwPiAKx5ThUhAABCBX2DkBbRPWTfN+/zWtpCmLFZJHPX5snxKv4WV91gq6DAKQJ053Oe9vFrqPLcOIXGqkwuiPAOQq/Lf7/sc5nvcb2zqdJ45TvokAIADwGXY7LO4DV6k+56/R384yGNDKn+1pvMdivN2/SrgU28/gProMApAnkuwVkFreL+e3uYlZ23Jo5eRxNhoxsVie8HJu12eto8sgAHkJ/20XnCNjVvtJys/52+O6PcsVksuBbROPJEuUPwgKawcAAcgNI4Nop95u5RHZ5WldnEb/3SJ8XuycXCJwr4qpMarYbcaxqtdIl0EA8kScZ//XykZp9N+c4vVdK+u4jb+/LWtI8sZy5ilhJFBsHsN2/Bmi1/+Z7oIA5Cn831fFQTGqnCnnfzXF0f9Qm2soNvqHIX2QUARsbsE2S7U7DJ//bs/KrpLtp9f9jR6TDSwHdmP0nyHnvydF5996Ok+x9KTkW3LhQz02zzBBn2tHp9uDRmv17x/QRRCAPBP12X/L+69M+dpsU5Iok5NlvScf7imwga7hDjVNTU3uXVRNjdeNqvDf9sBbEjHvr0859Lf9CJ+T9Sjy0hVy2N64SPlw0deYA6gMUe/9p5r3h0yK4PwGG3JUAQhA+Ud/a9Mo5wSmmveHo79t7DE54st5JBcBgAT0D4qff5dF3m/Ybb8oG3vEWf4LCADECP+zuN9vo//hlnJEfPmSuJuAAgJA+N8Y7KDiNNfy/ma3/aJC/o8AQAIGB4VttJ3J+5tFJXG26XqAnxIBgPiMdi3v1+hvOf+MGFX+I3uUnxIBgHjhv91fH+pS3h9yaVB8UrI5Dyv/38QvigBAPPrJOriS94ejv93vj7s0l/wfAYAEmINvaeHfp2aU9wdh6B93M1Lu/yMAEBc5+fNB4TbbS+E/2W20HweFFW+po9HfIpK4ZxHY0txl/JrVA2sBKjMf0EGCkNlqNzm/Cfti2SExq85V/j8at6gMrAWonmgg66WuZyVwfsJ/UgDwHY3+tsPPtITVEQAEADzH9virTVDvRYX/L9F8CAD4O/rvpWIioz8gANWJHcu9EwIACED1jf5HB4VTiJLA8l8EADx2fvsdZ5fwFk9xIg8CAP5ip/rWl1Cfx38RAPB09O8UxDuFh/wfEIAcYXvudy2hPst/EQDwmHNLrL9Q+f9GmhEBAP/C/+4qupf4NuT/CAB4Ss8yvAf5PwIAnvJJifVtpyKW/yIA4CmNJdZfoPx/C82IAICHyHlfV/FCCW8xn1ZEAMBvbkhY7y3ZPJoPAQC/uUnWkKDexez+CwiA/2mA3cO3w0jXxKj2S9W7ldYDBCAfIvCsCjv7b0mRl9qIb08OjqfVwGBT0BwRrgo8RXa
OrI9sd9lHsmdk94Yj/2u0VDY46WsuXhQAkAIAAAIAAAgAACAAAIAAAAACAAAIAAAgAACAAAAAAgAACAAAIAAAgAAAAAIAAAgAAAIAAAgAACAAAIAAAAACAAAIAAAgAACAAAAAAgAACAAAIAAAgAAAAAIAAAgAACAAAIAAAAACAACu8F8BBgDlSreLhu1kMQAAAABJRU5ErkJggg==</script>
There are a few solutions:
Make sure the transparent area actually has color in it.
In other words, if all the pixels in the top left of the image above are RED with 0 alpha, then when the pixels get filtered they'll blend with (1,0,0,0) transparent red instead of (0,0,0,0) transparent black. Unfortunately there's no easy way to do this in most drawing programs.
There's a plugin for Photoshop that lets you do it called SuperPNG. It lets you create a 4th channel for the alpha instead of using Photoshop's transparency, so you can set the alpha separately from the image.
In your case you'd end up with an image with layers like this
Now there are no bad colors to blend with.
Switch to pre-multiplied alpha
In this case, before calling gl.texImage2D to upload the image, call
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
That tells WebGL to multiply the colors by their alpha when the image is uploaded. You then blend with
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
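Put together, a minimal sketch (texture and image are placeholder names):
// upload with premultiplied alpha...
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);

// ...and use premultiplied-alpha blending when drawing
gl.enable(gl.BLEND);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);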
Turn off filtering in GL
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
Assuming your source image doesn't have any bad colors, this means GL won't make new bad colors as it filters, but of course it also means that if you scale or rotate the image you'll get aliasing.
Create your own mips
Most apps use gl.generateMipmap to generate mips, but you can generate them yourself offline and upload them yourself. That's not a perfect solution either, but it does let you use gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
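For reference, uploading your own mips is just one texImage2D call per level; a rough sketch where mip0, mip1, ... stand in for your pre-generated, correctly sized images:
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, mip0); // e.g. 256x256
gl.texImage2D(gl.TEXTURE_2D, 1, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, mip1); // 128x128
gl.texImage2D(gl.TEXTURE_2D, 2, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, mip2); // 64x64
// ...continue down to a 1x1 level so the texture is mipmap-complete
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);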
Combinations of the above
Have you tried disabling the alpha on the WebGL context?
var gl = this.canvas.getContext('webgl', {antialias: false, alpha: false })
|| this.canvas.getContext('experimental-webgl', {antialias: false, alpha: false });
