I have been trying to create shading in WebGL just like in this image: a scene where we can see a cone, a sphere, and a light (whose position we can change with the sliders).
I've tried to write some code in the HTML file by following several shading examples from WebGL tutorial sites, but right now I can't even see the shapes. I'm surely doing something wrong, but I just don't know where. Here's my code; I've also included a link because the project contains multiple files. Thanks in advance.
Link: https://wetransfer.com/downloads/cd0f66f2e2866c0d118e95b02e01cb0520200923203442/274553
<html>
<head>
<title>Light and Shading</title>
<meta http-equiv='content-type' content='text/html; charset=ISO-8859-1'>
<!-- CSS Styles //-->
<link href='css/style.css' type='text/css' rel='stylesheet'>
<link href='css/desert.css' type='text/css' rel='stylesheet'/>
<link href='css/colorpicker.css' type='text/css' rel='stylesheet'/>
<link href='css/smoothness/jquery-ui-1.8.13.custom.css' type='text/css' rel='stylesheet' />
<!-- JavaScript Libraries //-->
<script type='text/javascript' src='js/gl-matrix-min.js'></script>
<script type='text/javascript' src='js/jquery-1.5.1.min.js'></script>
<script type='text/javascript' src='js/jquery-ui-1.8.13.custom.min.js'></script>
<script type='text/javascript' src='js/prettify.js'></script>
<script type='text/javascript' src='js/utils.js'></script>
<script type='text/javascript' src='js/colorpicker.js'></script>
<script type='text/javascript' src='js/codeview.js'></script>
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec3 aVertexNormal;
// combined model-view matrix.
uniform mat4 uMVMatrix;
// projection matrix
uniform mat4 uPMatrix;
// normal matrix.
uniform mat3 uNMatrix;
// light position.
uniform vec3 uLightPosition;
// the transformed normal
varying vec3 vNormal;
// the vertex-to-light direction
varying vec3 vLightRay;
// the camera-to-vertex direction
varying vec3 vEyeVec;
uniform vec4 uLightAmbient;
uniform vec4 uLightDiffuse;
uniform vec4 uLightSpecular;
uniform vec4 uMaterialAmbient;
uniform vec4 uMaterialDiffuse;
uniform vec4 uMaterialSpecular;
void main(void) {
vec4 ambientProduct= uLightAmbient* uMaterialAmbient;
vec4 diffuseProduct= uLightDiffuse*uMaterialDiffuse;
vec4 specularProduct= uLightSpecular*uMaterialSpecular;
vec3 pos = (uMVMatrix*vec4(aVertexPosition, 1.0)).xyz;
// eye/camera position.
const vec3 eyePosition = vec3(0,0,-40);
//Transformed normal position
vNormal = normalize((uNMatrix* aVertexNormal).xyz) ;
//Transformed light position
vec4 light = uMVMatrix * vec4(uLightPosition,1.0);
vec3 lightPos = (uMVMatrix * light).xyz;
//Light position
vLightRay = normalize(pos - lightPos);
//Vector Eye
vEyeVec = -normalize(pos);
//Final vertex position
gl_Position = uMVMatrix*uPMatrix* vec4(aVertexPosition, 1.0);
}
</script>
<script id="shader-fs" type="x-shader/x-fragment">
#ifdef GL_ES
precision highp float;
#endif
varying vec3 vNormal;
varying vec3 vLightRay;
varying vec3 vEyeVec;
uniform vec4 ambientProduct;
uniform vec4 diffuseProduct;
uniform vec4 specularProduct;
uniform float uShininess;
void main(void)
{
vec4 diffuse = max(dot( vNormal,vLightRay), 0.0) * diffuseProduct;
vec3 H = normalize(vLightRay+vEyeVec);
vec4 specular =
pow(max(dot(vNormal, H), 0.0), uShininess) * specularProduct;
if (dot(vLightRay, vNormal) < 0.0)
specular = vec4(0.0, 0.0, 0.0, 1.0);
vec4 fColor = ambientProduct + diffuse + specular;
fColor.a = 1.0;
gl_FragColor =fColor;
}
</script>
<script id='code-js' type="text/javascript">
var gl = null; // WebGL context
var prg = null; // The program (shaders)
var c_width = 0; // Variable to store the width of the canvas
var c_height = 0; // Variable to store the height of the canvas
var mvMatrix = mat4.create(); // The Model-View matrix
var pMatrix = mat4.create(); // The projection matrix
var nMatrix = mat4.create(); // The normal matrix
var distance = -40;
var animateFlag = false;
var objects = [];
/**
* The program contains a series of instructions that tell the Graphic Processing Unit (GPU)
* what to do with every vertex and fragment that we pass it.
* The vertex shader and the fragment shader together are called the program.
*/
function initProgram() {
var fragmentShader = utils.getShader(gl, "shader-fs");
var vertexShader = utils.getShader(gl, "shader-vs");
prg = gl.createProgram();
gl.attachShader(prg, vertexShader);
gl.attachShader(prg, fragmentShader);
gl.linkProgram(prg);
if (!gl.getProgramParameter(prg, gl.LINK_STATUS)) {
alert("Could not initialise shaders");
}
gl.useProgram(prg);
prg.aVertexPosition = gl.getAttribLocation(prg, "aVertexPosition");
prg.aVertexNormal = gl.getAttribLocation(prg, "aVertexNormal");
prg.uPMatrix = gl.getUniformLocation(prg, "uPMatrix");
prg.uMVMatrix = gl.getUniformLocation(prg, "uMVMatrix");
prg.uNMatrix = gl.getUniformLocation(prg, "uNMatrix");
prg.uMaterialAmbient = gl.getUniformLocation(prg, "uMaterialAmbient");
prg.uMaterialDiffuse = gl.getUniformLocation(prg, "uMaterialDiffuse");
prg.uMaterialSpecular = gl.getUniformLocation(prg, "uMaterialSpecular");
prg.uShininess = gl.getUniformLocation(prg, "uShininess");
prg.uLightPosition = gl.getUniformLocation(prg, "uLightPosition");
prg.uLightAmbient = gl.getUniformLocation(prg, "uLightAmbient");
prg.uLightDiffuse = gl.getUniformLocation(prg, "uLightDiffuse");
prg.uLightSpecular = gl.getUniformLocation(prg, "uLightSpecular");
}
function initLights(){
//Light uniforms
gl.uniform3fv(prg.uLightPosition,[4.5,3.0,15.0]);
gl.uniform4f(prg.uLightAmbient ,1.0,1.0,1.0,1.0);
gl.uniform4f(prg.uLightDiffuse,1.0,1.0,1.0,1.0);
gl.uniform4f(prg.uLightSpecular,1.0,1.0,1.0,1.0);
//Object Uniforms
gl.uniform4f(prg.uMaterialAmbient, 0.1,0.1,0.1,1.0);
gl.uniform4f(prg.uMaterialDiffuse, 0.5,0.8,0.1,1.0);
gl.uniform4f(prg.uMaterialSpecular, 0.6,0.6,0.6,1.0);
gl.uniform1f(prg.uShininess, 200.0);
}
/**
* Creates an AJAX request to load the scene asynchronously
*/
function loadScene(){
loadObject('models/plane.json');
loadObject('models/cone.json','cone');
loadObject('models/sphere.json','sphere');
loadObject('models/smallsph.json','lightsource');
}
function getObject(alias){
for(var i=0; i<objects.length; i++){
if (alias == objects[i].alias) return objects[i];
}
return null;
}
/**
* Ajax and JSON in action
*/
function loadObject(filename,alias){
var request = new XMLHttpRequest();
console.info('Requesting ' + filename);
request.open("GET",filename);
request.onreadystatechange = function() {
if (request.readyState == 4) {
if(request.status == 404) {
console.info(filename + ' does not exist');
}
else {
var o = JSON.parse(request.responseText);
o.alias = (alias==null)?'none':alias;
handleLoadedObject(filename,o);
}
}
}
request.send();
}
/**
* Creates the buffers that contain the geometry of the object
*/
function handleLoadedObject(filename,object) {
console.info(filename + ' has been retrieved from the server');
var vertexBufferObject = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBufferObject);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(object.vertices), gl.STATIC_DRAW);
var normalBufferObject = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, normalBufferObject);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(calcNormals(object.vertices, object.indices)), gl.STATIC_DRAW);
var indexBufferObject = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBufferObject);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(object.indices), gl.STATIC_DRAW);
object.vbo = vertexBufferObject;
object.ibo = indexBufferObject;
object.nbo = normalBufferObject;
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
gl.bindBuffer(gl.ARRAY_BUFFER,null);
objects.push(object);
}
/**
* Main rendering function. Called every 500ms according to WebGLStart function (see below)
*/
function drawScene() {
gl.clearColor(0.3,0.3,0.3, 1.0);
gl.clearDepth(100.0);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LEQUAL);
gl.viewport(0, 0, c_width, c_height);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
mat4.perspective(60, c_width / c_height, 0.1, 1000.0, pMatrix);
try{
gl.enableVertexAttribArray(prg.aVertexPosition);
gl.enableVertexAttribArray(prg.aVertexNormal);
for (var i = 0; i < objects.length; i++){
var object = objects[i];
mat4.identity(mvMatrix);
mat4.translate(mvMatrix, [0.0, 0.0, distance]); //Sets the camera to a reasonable distance to view the part
mat4.rotate(mvMatrix, 30*Math.PI/180, [1,0,0]);
mat4.rotate(mvMatrix, angle*Math.PI/180, [0,1,0]);
if (object.alias == 'lightsource'){
var lightPos = gl.getUniform(prg, prg.uLightPosition);
mat4.translate(mvMatrix,lightPos);
}
gl.uniformMatrix4fv(prg.uMVMatrix, false, mvMatrix);
gl.uniformMatrix4fv(prg.uPMatrix, false, pMatrix);
mat4.set(mvMatrix, nMatrix);
mat4.inverse(nMatrix);
mat4.transpose(nMatrix);
gl.uniformMatrix4fv(prg.uNMatrix, false, nMatrix);
gl.uniform4fv(prg.uMaterialAmbient, object.ambient);
gl.uniform4fv(prg.uMaterialDiffuse, object.diffuse);
gl.uniform4fv(prg.uMaterialSpecular, object.specular);
gl.bindBuffer(gl.ARRAY_BUFFER, object.vbo);
gl.vertexAttribPointer(prg.aVertexPosition, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(prg.aVertexPosition);
gl.bindBuffer(gl.ARRAY_BUFFER, object.nbo);
gl.vertexAttribPointer(prg.aVertexNormal, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(prg.aVertexNormal);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, object.ibo);
gl.drawElements(gl.TRIANGLES, object.indices.length, gl.UNSIGNED_SHORT,0);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
}
}
catch(err){
alert(err);
message(err.description);
}
}
var lastTime = 0;
var angle = 0;
/**
* Updates the angle of rotation by a little bit each time
*/
function animate() {
var timeNow = new Date().getTime();
if (lastTime != 0) {
var elapsed = timeNow - lastTime;
if (animateFlag) angle += (90 * elapsed) / 10000.0;
}
lastTime = timeNow;
}
/**
* Render Loop
*/
function renderLoop() {
requestAnimFrame(renderLoop);
drawScene();
animate();
}
/**
* Entry point. This function is invoked when the page is loaded
*/
function runWebGLApp() {
//Obtains a WebGL context
gl = utils.getGLContext("canvas-element-id");
//Initializes the program (shaders)
initProgram();
//Initializes lights
initLights();
//Load Scene
loadScene();
//Renders the scene!
renderLoop();
}
</script>
</head>
<body onLoad='runWebGLApp()'>
<div id='top'>
<div id='contents'>
<div id='canvasContainer'>
<canvas id='canvas-element-id' width='480' height='400'>
Your browser does not support the HTML5 canvas element.
</canvas>
</div>
</div>
<div id='bottom'>
<table style='padding=0px'>
<tr>
<td>X:</td><td id='slider-x-value' width='30px'>4.5</td><td width='150px'><div id='slider-x'/></td>
</tr>
<tr>
<td>Y:</td><td id='slider-y-value' width='30px'>3.0</td><td width='150px'><div id='slider-y'/></td>
</tr>
<tr>
<td>Z:</td> <td id='slider-z-value' width='30px'>15.0</td><td width='150px'><div id='slider-z'/></td>
</tr>
</table>
</div>
<script>cview.run(cview.MODE_VIEW);</script>
<script>
$('#slider-shininess').slider({value:200, min:1, max:300, step:1, slide:updateShininess});
$('#slider-x').slider({value:4.5, min:-50, max:50, step:0.1, slide:updateLightPosition, change:updateLightPosition});
$('#slider-y').slider({value:3.0, min:0, max:50, step:0.1, slide:updateLightPosition, change:updateLightPosition});
$('#slider-z').slider({value:15.0, min:-50, max:50, step:0.1, slide:updateLightPosition, change:updateLightPosition});
$('#animate-btn').button();
$('#animate-btn').click(
function(){
if ($('#animate-btn:checked').val()==null){
animateFlag = false;
}
else{
animateFlag = true;
}
});
function updateShininess(){
var v = $('#slider-shininess').slider("value");
gl.uniform1f(prg.uShininess, v);
$('#slider-shininess-value').html(v);
}
function updateLightPosition(){
var x = $('#slider-x').slider("value");
var y = $('#slider-y').slider("value");
var z = $('#slider-z').slider("value");
gl.uniform3fv(prg.uLightPosition, [x,y,z]);
$('#slider-x-value').html(x);
$('#slider-y-value').html(y);
$('#slider-z-value').html(z);
}
function updateDistance(){
var d = $('#slider-distance').slider("value");
$('#slider-distance-value').html(distance);
distance = -d;
}
function updateObjectColor(alias, r,g,b){
var object = getObject(alias);
if (object != null){
object.diffuse = [r,g,b,1.0];
}
}
$('#colorSelectorSphere').ColorPicker({
onSubmit: function(hsb, hex, rgb, el) {
$(el).val(hex);
$(el).ColorPickerHide();
},
color: '#00ff00',
onShow: function (colpkr) {
$(colpkr).fadeIn(500);
return false;
},
onHide: function (colpkr) {
$(colpkr).fadeOut(500);
return false;
},
onChange: function (hsb, hex, rgb) {
$('#colorSelectorSphere div').css('backgroundColor', '#' + hex);
updateObjectColor('sphere',rgb.r/256,rgb.g/256,rgb.b/256);
},
onBeforeShow: function (colpkr) {
$(this).ColorPickerSetColor('rgb(0.5,0.8,0.1)');
}
})
$('#colorSelectorCone').ColorPicker({
onSubmit: function(hsb, hex, rgb, el) {
$(el).val(hex);
$(el).ColorPickerHide();
},
color: '#00ff00',
onShow: function (colpkr) {
$(colpkr).fadeIn(500);
return false;
},
onHide: function (colpkr) {
$(colpkr).fadeOut(500);
return false;
},
onChange: function (hsb, hex, rgb) {
$('#colorSelectorCone div').css('backgroundColor', '#' + hex);
updateObjectColor('cone',rgb.r/256,rgb.g/256,rgb.b/256);
},
onBeforeShow: function (colpkr) {
$(this).ColorPickerSetColor('rgb(0.8,0.1,0.5)');
}
})
// Computes the vertex normals. Each vertex normal is
// the average of the neighboring triangles' normals.
//
// vertices: the list of vertices.
// ind: the list of indices.
// returns: the list of per-vertex normals.
function calcNormals(vertices, ind){
var x=0;
var y=1;
var z=2;
var v1 = [], v2 = [], thisNormal = [];
// initialize the list of normals.
var ns = [];
for(var i=0;i<vertices.length;i++)
{
ns[i]=0.0;
}
for(var i=0;i<ind.length;i=i+3){
//v1 = p1 - p0
v1[x] = vertices[3*ind[i+1]+x] - vertices[3*ind[i]+x];
v1[y] = vertices[3*ind[i+1]+y] - vertices[3*ind[i]+y];
v1[z] = vertices[3*ind[i+1]+z] - vertices[3*ind[i]+z];
// v2 = p2 - p1
v2[x] = vertices[3*ind[i+2]+x] - vertices[3*ind[i]+x];
v2[y] = vertices[3*ind[i+2]+y] - vertices[3*ind[i]+y];
v2[z] = vertices[3*ind[i+2]+z] - vertices[3*ind[i]+z];
// N = v2 x v1 (cross product).
thisNormal[x] = v1[y]*v2[z] - v1[z]*v2[y];
thisNormal[y] = v1[z]*v2[x] - v1[x]*v2[z];
thisNormal[z] = v1[x]*v2[y] - v1[y]*v2[x];
for(j=0;j<3;j++)
{
// N += thisNormal. Accumulate the normals.
ns[3*ind[i+j]+x] = ns[3*ind[i+j]+x] + thisNormal[x];
ns[3*ind[i+j]+y] = ns[3*ind[i+j]+y] + thisNormal[y];
ns[3*ind[i+j]+z] = ns[3*ind[i+j]+z] + thisNormal[z];
}
}
// Normalization.
for(var i=0;i<vertices.length;i=i+3){
var nn=[];
var len = 0;
for(var j = 0; j < 3; j++)
{
nn[j] = ns[i+j];
len += nn[j] * nn[j];
}
// The length of the normal.
len = Math.sqrt(len);
if (len == 0)
len = 0.00001;
for(var j = 0; j < 3; j++)
ns[i+j] = nn[j] / len;
console.log(len);
}
return ns;
}
</script>
</body>
</html>
Honestly, there are too many issues to cover while trying to get your code to work. First, you really need to learn how to make a minimal repro. The code you posted doesn't run; it references several scripts that don't exist and data that doesn't exist.
The tutorial the code is based on appears to be old. No one uses XMLHttpRequest in 2020. There's no such function as requestAnimFrame; it's requestAnimationFrame. I think that's left over from a polyfill from around 2011. It's still using <body onload="">, which few people use anymore. It's also using new Date().getTime(), which there's no reason to use since the time is passed into requestAnimationFrame. It's calling some function to get a WebGL context, but there's no reason for that either in 2020. It's also apparently using an old version of glMatrix, because the current version uses a different API: every function takes a matrix to store the result as its first argument, and perspective takes the field of view in radians. I have no idea if it used to take degrees, but the code was passing degrees.
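For example, with the glMatrix build linked at the bottom of this answer and the timestamp requestAnimationFrame already hands you, the calls look roughly like this (a sketch for illustration, not a drop-in patch for the posted code):
// glMatrix 3.x: the destination matrix is the first argument,
// and perspective takes the field of view in radians.
const mvMatrix = glMatrix.mat4.create();
const pMatrix = glMatrix.mat4.create();
glMatrix.mat4.perspective(pMatrix, 60 * Math.PI / 180, c_width / c_height, 0.1, 1000.0);
glMatrix.mat4.translate(mvMatrix, mvMatrix, [0, 0, -40]); // (out, in, vector)
// requestAnimationFrame (not requestAnimFrame) passes a high-resolution
// timestamp in milliseconds to the callback, so new Date().getTime() is unnecessary.
let lastTimeMs = 0;
function renderLoop(timeMs) {
  const elapsed = lastTimeMs ? timeMs - lastTimeMs : 0;
  lastTimeMs = timeMs;
  // ...update the rotation angle from elapsed and draw...
  requestAnimationFrame(renderLoop);
}
requestAnimationFrame(renderLoop);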
I changed the XHR code to just return a cube (an example of the "minimal" and "complete" parts of a "minimal complete verifiable example" (MCVE); I think Stack Overflow now calls it a "minimal reproducible example"). I also removed all the references to ColorPicker (another part of making a minimal repro).
The code should have been producing errors in the JavaScript console. Did you check the JavaScript console? In particular, uNMatrix is a mat3, but the code was calling gl.uniformMatrix4fv to set it, which is an error.
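A minimal sketch of that particular fix, using glMatrix's mat3 helpers (the working code below does the equivalent by inverting and transposing a mat4 and then calling mat3.fromMat4):
// uNMatrix is declared as mat3 in the shader, so build a 3x3 normal matrix
// and upload it with uniformMatrix3fv, not uniformMatrix4fv.
const normalMatrix = glMatrix.mat3.create();
glMatrix.mat3.normalFromMat4(normalMatrix, mvMatrix); // inverse-transpose of the upper 3x3
gl.uniformMatrix3fv(prg.uNMatrix, false, normalMatrix);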
Using webgl-lint pointed out that several uniforms were not being set, including "ambientProduct", "diffuseProduct", and "specularProduct", and that several were being set that don't exist (that part is not necessarily a bug). There are also several uniforms in the vertex shader that are not actually used. For example, the colors seem to be set with
gl.uniform4fv(prg.uMaterialAmbient, object.ambient);
gl.uniform4fv(prg.uMaterialDiffuse, object.diffuse);
gl.uniform4fv(prg.uMaterialSpecular, object.specular);
but those uniforms are not actually used in the shaders (they appear in the vertex shader, but if you follow the code you'll see they have no effect on the output).
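One way to make the two shaders agree would be to compute the light*material products in JavaScript and upload them to the fragment shader uniforms that do exist; the code below instead takes the simpler route of uploading the per-object colors directly as those products. A sketch of the product approach, using the light values from initLights:
// Multiply light * material component-wise and feed the fragment shader's
// ambientProduct / diffuseProduct / specularProduct uniforms.
const mul4 = (a, b) => a.map((v, i) => v * b[i]);
const lightAmbient  = [1.0, 1.0, 1.0, 1.0];
const lightDiffuse  = [1.0, 1.0, 1.0, 1.0];
const lightSpecular = [1.0, 1.0, 1.0, 1.0];
gl.uniform4fv(prg.ambientProduct,  mul4(lightAmbient,  object.ambient));
gl.uniform4fv(prg.diffuseProduct,  mul4(lightDiffuse,  object.diffuse));
gl.uniform4fv(prg.specularProduct, mul4(lightSpecular, object.specular));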
Then, when I finally got rid of all the errors, the remaining issue preventing anything from showing up on the screen was that the math for gl_Position had the two matrices backward. It should be projection * modelView * position, but the code had modelView * projection * position.
Further, all 3 models are drawn at the same position. Maybe the geometry in those models is at different positions; I have no idea, but normally you'd want to give each thing you draw its own position in some form or another, based on the loop index, an array of positions, or per-object data.
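For example, something along these lines inside the draw loop (object.position is made-up data here; the posted models don't carry one, which is why the code below just offsets the cube vertices instead):
mat4.identity(mvMatrix);
mat4.translate(mvMatrix, mvMatrix, [0.0, 0.0, distance]);
// hypothetical per-object offset; fall back to spreading objects out by loop index
mat4.translate(mvMatrix, mvMatrix, object.position || [i * 5 - 5, 0, 0]);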
Anyway, that gets something on the screen, but having spent 45 minutes already I don't feel like trying to fix the lighting. Rather, I suggest you read some slightly more up-to-date tutorials, like this one and the ones it links to.
var mat4 = glMatrix.mat4;
var gl = null; // WebGL context
var prg = null; // The program (shaders)
var c_width = 480; // Variable to store the width of the canvas
var c_height = 400; // Variable to store the height of the canvas
var mvMatrix = mat4.create(); // The Model-View matrix
var pMatrix = mat4.create(); // The projection matrix
var nMatrix = mat4.create(); // The normal matrix
var distance = -40;
var animateFlag = false;
var objects = [];
const utils = {
getShader(gl, id) {
const elem = document.getElementById(id);
const type = /vertex/.test(elem.type) ?
gl.VERTEX_SHADER :
gl.FRAGMENT_SHADER;
const sh = gl.createShader(type);
gl.shaderSource(sh, elem.text);
gl.compileShader(sh);
if (!gl.getShaderParameter(sh, gl.COMPILE_STATUS)) {
throw new Error(gl.getShaderInfoLog(sh));
}
return sh;
},
};
/**
* The program contains a series of instructions that tell the Graphic Processing Unit (GPU)
* what to do with every vertex and fragment that we pass it.
* The vertex shader and the fragment shader together are called the program.
*/
function initProgram() {
var fragmentShader = utils.getShader(gl, "shader-fs");
var vertexShader = utils.getShader(gl, "shader-vs");
prg = gl.createProgram();
gl.attachShader(prg, vertexShader);
gl.attachShader(prg, fragmentShader);
gl.linkProgram(prg);
if (!gl.getProgramParameter(prg, gl.LINK_STATUS)) {
alert("Could not initialise shaders");
}
gl.useProgram(prg);
prg.aVertexPosition = gl.getAttribLocation(prg, "aVertexPosition");
prg.aVertexNormal = gl.getAttribLocation(prg, "aVertexNormal");
prg.uPMatrix = gl.getUniformLocation(prg, "uPMatrix");
prg.uMVMatrix = gl.getUniformLocation(prg, "uMVMatrix");
prg.uNMatrix = gl.getUniformLocation(prg, "uNMatrix");
prg.uMaterialAmbient = gl.getUniformLocation(prg, "uMaterialAmbient");
prg.uMaterialDiffuse = gl.getUniformLocation(prg, "uMaterialDiffuse");
prg.uMaterialSpecular = gl.getUniformLocation(prg, "uMaterialSpecular");
prg.uShininess = gl.getUniformLocation(prg, "uShininess");
prg.uLightPosition = gl.getUniformLocation(prg, "uLightPosition");
prg.uLightAmbient = gl.getUniformLocation(prg, "uLightAmbient");
prg.uLightDiffuse = gl.getUniformLocation(prg, "uLightDiffuse");
prg.uLightSpecular = gl.getUniformLocation(prg, "uLightSpecular");
prg.ambientProduct = gl.getUniformLocation(prg, "ambientProduct");
prg.diffuseProduct = gl.getUniformLocation(prg, "diffuseProduct");
prg.specularProduct = gl.getUniformLocation(prg, "specularProduct");
}
function initLights() {
//Light uniforms
gl.uniform3fv(prg.uLightPosition, [4.5, 3.0, 15.0]);
gl.uniform4f(prg.uLightAmbient, 1.0, 1.0, 1.0, 1.0);
gl.uniform4f(prg.uLightDiffuse, 1.0, 1.0, 1.0, 1.0);
gl.uniform4f(prg.uLightSpecular, 1.0, 1.0, 1.0, 1.0);
//Object Uniforms
gl.uniform4f(prg.uMaterialAmbient, 0.1, 0.1, 0.1, 1.0);
gl.uniform4f(prg.uMaterialDiffuse, 0.5, 0.8, 0.1, 1.0);
gl.uniform4f(prg.uMaterialSpecular, 0.6, 0.6, 0.6, 1.0);
gl.uniform1f(prg.uShininess, 200.0);
}
/**
* Creates an AJAX request to load the scene asynchronously
*/
function loadScene() {
loadObject('models/plane.json');
loadObject('models/cone.json', 'cone');
loadObject('models/sphere.json', 'sphere');
loadObject('models/smallsph.json', 'lightsource');
}
function getObject(alias) {
for (var i = 0; i < objects.length; i++) {
if (alias == objects[i].alias) return objects[i];
}
return null;
}
/**
* Ajax and JSON in action
*/
const vertices = [
-1, -1, 1,
1, -1, 1,
-1, 1, 1,
1, 1, 1,
1, -1, 1,
1, -1, -1,
1, 1, 1,
1, 1, -1,
1, -1, -1,
-1, -1, -1,
1, 1, -1,
-1, 1, -1,
-1, -1, -1,
-1, -1, 1,
-1, 1, -1,
-1, 1, 1,
1, 1, -1,
-1, 1, -1,
1, 1, 1,
-1, 1, 1,
1, -1, 1,
-1, -1, 1,
1, -1, -1,
-1, -1, -1,
];
let modelNum = 0
function loadObject(filename, alias) {
setTimeout(() => {
const o = {
alias: (alias == null) ? 'none' : alias,
ambient: [0.1, 0.1, 0.1, 1],
diffuse: [Math.random(), Math.random(), Math.random(), 1],
specular: [1, 1, 1, 1],
// to make up for the fact the code does not have different positions
// for each model we'll move the vertices (bad)
vertices: vertices.map((v, i) => i % 3 === 0 ? v + modelNum * 3 : v),
indices: [
0, 1, 2, 2, 1, 3,
4, 5, 6, 6, 5, 7,
8, 9, 10, 10, 9, 11,
12, 13, 14, 14, 13, 15,
16, 17, 18, 18, 17, 19,
20, 21, 22, 22, 21, 23,
],
};
handleLoadedObject(filename, o);
++modelNum;
});
}
/**
* Creates the buffers that contain the geometry of the object
*/
function handleLoadedObject(filename, object) {
//console.info(filename + ' has been retrieved from the server');
var vertexBufferObject = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBufferObject);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(object.vertices), gl.STATIC_DRAW);
var normalBufferObject = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, normalBufferObject);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(calcNormals(object.vertices, object.indices)), gl.STATIC_DRAW);
var indexBufferObject = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBufferObject);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(object.indices), gl.STATIC_DRAW);
object.vbo = vertexBufferObject;
object.ibo = indexBufferObject;
object.nbo = normalBufferObject;
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
objects.push(object);
}
/**
* Main rendering function. Called every 500ms according to WebGLStart function (see below)
*/
function drawScene() {
gl.clearColor(0.3, 0.3, 0.3, 1.0);
//gl.clearDepth(100.0);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LEQUAL);
gl.viewport(0, 0, c_width, c_height);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
mat4.perspective(pMatrix, 60 * Math.PI / 180, c_width / c_height, 0.1, 1000.0);
gl.enableVertexAttribArray(prg.aVertexPosition);
gl.enableVertexAttribArray(prg.aVertexNormal);
for (var i = 0; i < objects.length; i++) {
var object = objects[i];
mat4.identity(mvMatrix);
mat4.translate(mvMatrix, mvMatrix, [0.0, 0.0, distance]); //Sets the camera to a reasonable distance to view the part
mat4.rotate(mvMatrix, mvMatrix, 30 * Math.PI / 180, [1, 0, 0]);
mat4.rotate(mvMatrix, mvMatrix, angle * Math.PI / 180, [0, 1, 0]);
if (object.alias == 'lightsource') {
var lightPos = gl.getUniform(prg, prg.uLightPosition);
mat4.translate(mvMatrix, mvMatrix, lightPos);
}
gl.uniformMatrix4fv(prg.uMVMatrix, false, mvMatrix);
gl.uniformMatrix4fv(prg.uPMatrix, false, pMatrix);
mat4.copy(nMatrix, mvMatrix); // copy the model-view matrix (glMatrix 3 has no two-argument mat4.set)
mat4.invert(nMatrix, nMatrix);
mat4.transpose(nMatrix, nMatrix);
const t3 = glMatrix.mat3.create();
glMatrix.mat3.fromMat4(t3, nMatrix);
gl.uniformMatrix3fv(prg.uNMatrix, false, t3);
gl.uniform4fv(prg.ambientProduct, object.ambient);
gl.uniform4fv(prg.diffuseProduct, object.diffuse);
gl.uniform4fv(prg.specularProduct, object.specular);
gl.bindBuffer(gl.ARRAY_BUFFER, object.vbo);
gl.vertexAttribPointer(prg.aVertexPosition, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(prg.aVertexPosition);
gl.bindBuffer(gl.ARRAY_BUFFER, object.nbo);
gl.vertexAttribPointer(prg.aVertexNormal, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(prg.aVertexNormal);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, object.ibo);
gl.drawElements(gl.TRIANGLES, object.indices.length, gl.UNSIGNED_SHORT, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
}
}
var lastTime = 0;
var angle = 0;
/**
* Updates the angle of rotation by a little bit each time
*/
function animate() {
var timeNow = new Date().getTime();
if (lastTime != 0) {
var elapsed = timeNow - lastTime;
if (animateFlag) angle += (90 * elapsed) / 10000.0;
}
lastTime = timeNow;
}
/**
* Render Loop
*/
function renderLoop() {
drawScene();
animate();
requestAnimationFrame(renderLoop);
}
/**
* Entry point. This function is invoked when the page is loaded
*/
function runWebGLApp() {
//Obtains a WebGL context
gl = document.getElementById("canvas-element-id").getContext('webgl');
//Initializes the program (shaders)
initProgram();
//Initializes lights
initLights();
//Load Scene
loadScene();
//Renders the scene!
renderLoop();
}
$('#slider-shininess').slider({
value: 200,
min: 1,
max: 300,
step: 1,
slide: updateShininess
});
$('#slider-x').slider({
value: 4.5,
min: -50,
max: 50,
step: 0.1,
slide: updateLightPosition,
change: updateLightPosition
});
$('#slider-y').slider({
value: 3.0,
min: 0,
max: 50,
step: 0.1,
slide: updateLightPosition,
change: updateLightPosition
});
$('#slider-z').slider({
value: 15.0,
min: -50,
max: 50,
step: 0.1,
slide: updateLightPosition,
change: updateLightPosition
});
$('#animate-btn').button();
$('#animate-btn').click(
function() {
if ($('#animate-btn:checked').val() == null) {
animateFlag = false;
} else {
animateFlag = true;
}
});
function updateShininess() {
var v = $('#slider-shininess').slider("value");
gl.uniform1f(prg.uShininess, v);
$('#slider-shininess-value').html(v);
}
function updateLightPosition() {
var x = $('#slider-x').slider("value");
var y = $('#slider-y').slider("value");
var z = $('#slider-z').slider("value");
gl.uniform3fv(prg.uLightPosition, [x, y, z]);
$('#slider-x-value').html(x);
$('#slider-y-value').html(y);
$('#slider-z-value').html(z);
}
function updateDistance() {
var d = $('#slider-distance').slider("value");
$('#slider-distance-value').html(distance);
distance = -d;
}
function updateObjectColor(alias, r, g, b) {
var object = getObject(alias);
if (object != null) {
object.diffuse = [r, g, b, 1.0];
}
}
// Computes the vertex normals. Each vertex normal is
// the average of the neighboring triangles' normals.
//
// vertices: the list of vertices.
// ind: the list of indices.
// returns: the list of per-vertex normals.
function calcNormals(vertices, ind) {
var x = 0;
var y = 1;
var z = 2;
var v1 = [],
v2 = [],
thisNormal = [];
// initialize the list of normals.
var ns = [];
for (var i = 0; i < vertices.length; i++) {
ns[i] = 0.0;
}
for (var i = 0; i < ind.length; i = i + 3) {
//v1 = p1 - p0
v1[x] = vertices[3 * ind[i + 1] + x] - vertices[3 * ind[i] + x];
v1[y] = vertices[3 * ind[i + 1] + y] - vertices[3 * ind[i] + y];
v1[z] = vertices[3 * ind[i + 1] + z] - vertices[3 * ind[i] + z];
// v2 = p2 - p1
v2[x] = vertices[3 * ind[i + 2] + x] - vertices[3 * ind[i] + x];
v2[y] = vertices[3 * ind[i + 2] + y] - vertices[3 * ind[i] + y];
v2[z] = vertices[3 * ind[i + 2] + z] - vertices[3 * ind[i] + z];
// N = v2 x v1 (cross product).
thisNormal[x] = v1[y] * v2[z] - v1[z] * v2[y];
thisNormal[y] = v1[z] * v2[x] - v1[x] * v2[z];
thisNormal[z] = v1[x] * v2[y] - v1[y] * v2[x];
for (j = 0; j < 3; j++) {
// N += thisNormal. Accumulate the normals.
ns[3 * ind[i + j] + x] = ns[3 * ind[i + j] + x] + thisNormal[x];
ns[3 * ind[i + j] + y] = ns[3 * ind[i + j] + y] + thisNormal[y];
ns[3 * ind[i + j] + z] = ns[3 * ind[i + j] + z] + thisNormal[z];
}
}
// Normalization.
for (var i = 0; i < vertices.length; i = i + 3) {
var nn = [];
var len = 0;
for (var j = 0; j < 3; j++) {
nn[j] = ns[i + j];
len += nn[j] * nn[j];
}
// The length of the normal.
len = Math.sqrt(len);
if (len == 0)
len = 0.00001;
for (var j = 0; j < 3; j++)
ns[i + j] = nn[j] / len;
}
return ns;
}
runWebGLApp();
<link href="https://code.jquery.com/ui/1.12.1/themes/base/jquery-ui.css" rel="stylesheet">
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec3 aVertexNormal;
// combined model-view matrix.
uniform mat4 uMVMatrix;
// projection matrix
uniform mat4 uPMatrix;
// normal matrix.
uniform mat3 uNMatrix;
// light position.
uniform vec3 uLightPosition;
// the transformed normal
varying vec3 vNormal;
// the vertex-to-light direction
varying vec3 vLightRay;
// the camera-to-vertex direction
varying vec3 vEyeVec;
uniform vec4 uLightAmbient;
uniform vec4 uLightDiffuse;
uniform vec4 uLightSpecular;
uniform vec4 uMaterialAmbient;
uniform vec4 uMaterialDiffuse;
uniform vec4 uMaterialSpecular;
void main(void) {
vec4 ambientProduct= uLightAmbient* uMaterialAmbient;
vec4 diffuseProduct= uLightDiffuse*uMaterialDiffuse;
vec4 specularProduct= uLightSpecular*uMaterialSpecular;
vec3 pos = (uMVMatrix*vec4(aVertexPosition, 1.0)).xyz;
// eye/camera position.
const vec3 eyePosition = vec3(0,0,-40);
//Transformed normal position
vNormal = normalize((uNMatrix* aVertexNormal).xyz) ;
//Transformed light position
vec4 light = uMVMatrix * vec4(uLightPosition,1.0);
vec3 lightPos = (uMVMatrix * light).xyz;
//Light position
vLightRay = normalize(pos - lightPos);
//Vector Eye
vEyeVec = -normalize(pos);
//Final vertex position
gl_Position = uPMatrix*uMVMatrix* vec4(aVertexPosition, 1.0);
}
</script>
<script id="shader-fs" type="x-shader/x-fragment">
#ifdef GL_ES
precision highp float;
#endif
varying vec3 vNormal;
varying vec3 vLightRay;
varying vec3 vEyeVec;
uniform vec4 ambientProduct;
uniform vec4 diffuseProduct;
uniform vec4 specularProduct;
uniform float uShininess;
void main(void)
{
vec4 diffuse = max(dot( vNormal,vLightRay), 0.0) * diffuseProduct;
vec3 H = normalize(vLightRay+vEyeVec);
vec4 specular =
pow(max(dot(vNormal, H), 0.0), uShininess) * specularProduct;
if (dot(vLightRay, vNormal) < 0.0)
specular = vec4(0.0, 0.0, 0.0, 1.0);
vec4 fColor = ambientProduct + diffuse + specular;
fColor.a = 1.0;
gl_FragColor =fColor;
}
</script>
<div id='top'>
<div id='contents'>
<div id='canvasContainer'>
<canvas id='canvas-element-id' width='480' height='400'>
Your browser does not support the HTML5 canvas element.
</canvas>
</div>
</div>
<div id='bottom'>
<table style='padding=0px'>
<tr>
<td>X:</td><td id='slider-x-value' width='30px'>4.5</td><td width='150px'><div id='slider-x'/></td>
</tr>
<tr>
<td>Y:</td><td id='slider-y-value' width='30px'>3.0</td><td width='150px'><div id='slider-y'/></td>
</tr>
<tr>
<td>Z:</td> <td id='slider-z-value' width='30px'>15.0</td><td width='150px'><div id='slider-z'/></td>
</tr>
</table>
</div>
<script src="https://cdn.jsdelivr.net/npm/gl-matrix#3.3.0/gl-matrix-min.js"></script>
<script src="https://code.jquery.com/jquery-3.5.1.min.js"></script>
<script src="https://code.jquery.com/ui/1.12.1/jquery-ui.min.js"></script>
<script src="https://greggman.github.io/webgl-lint/webgl-lint.js"
data-gman-debug-helper='
{
"warnUndefinedUniforms": false
}
'
></script>
I am working on a three.js scene that renders points and line segments. The scene renders fine if I use a LineBasicMaterial for the lines:
/**
* constructor for the gl manager
**/
function World() {
this.renderTarget = document.querySelector('#render-target');
this.scene = this.getScene();
this.camera = this.getCamera();
this.renderer = this.getRenderer();
this.controls = this.getControls();
this.masterCounts = null; // {id: nMasters}
this.edges = null; // 2d array where [[master, app, app]]
this.positions = null; // {id: [x,y]}
this.z = 0; // flat z dim
this.loadData();
this.render();
}
World.prototype.getScene = function() {
return new THREE.Scene();
}
World.prototype.getContainerSize = function() {
var elem = this.renderTarget;
return {
w: elem.clientWidth,
h: elem.clientHeight,
}
}
World.prototype.getCamera = function() {
var size = this.getContainerSize();
var camera = new THREE.PerspectiveCamera(75, size.w/size.h, 0.01, 10);
camera.position.set(0.5, 0.5, -0.67);
return camera;
}
World.prototype.getRenderer = function() {
var renderer = new THREE.WebGLRenderer({antialias: true, alpha: true});
var size = this.getContainerSize();
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(size.w, size.h);
document.querySelector('#render-target').appendChild(renderer.domElement);
return renderer;
}
World.prototype.getControls = function() {
var controls = new THREE.TrackballControls(this.camera, this.renderer.domElement);
controls.zoomSpeed = 0.4;
controls.panSpeed = 0.4;
controls.target.set(0.5, 0.5, 1);
return controls;
}
World.prototype.render = function() {
requestAnimationFrame(this.render.bind(this));
this.renderer.render(this.scene, this.camera);
this.controls.update();
}
World.prototype.getPointScale = function() {
return window.devicePixelRatio * window.innerHeight * 0.00001;
}
World.prototype.loadData = function() {
get('https://s3.amazonaws.com/duhaime/blog/visualizations/line-segments-network/node-positions-twopi.json', function(data) {
this.positions = center(JSON.parse(data));
get('https://s3.amazonaws.com/duhaime/blog/visualizations/line-segments-network/id-to-aggregate-masters.json', function(data) {
this.masterCounts = JSON.parse(data);
get('https://s3.amazonaws.com/duhaime/blog/visualizations/line-segments-network/edges.json', function(data) {
this.edges = JSON.parse(data);
this.addPoints();
this.addEdges();
}.bind(this));
}.bind(this));
}.bind(this));
}
World.prototype.addPoints = function() {
var geometry = new THREE.InstancedBufferGeometry(),
translations = getPointTranslations(this.positions),
colors = getColors(this.positions, this.masterCounts);
geometry.addAttribute('position',
new THREE.BufferAttribute( new Float32Array([0, 0, 0]), true, 3));
geometry.addAttribute('translation',
new THREE.InstancedBufferAttribute(translations, 3, true, 1) );
geometry.addAttribute('target',
new THREE.InstancedBufferAttribute(translations, 3, true, 1) );
geometry.addAttribute('color',
new THREE.InstancedBufferAttribute(colors, 3, true, 1) );
this.points = new THREE.Points(geometry, this.getShaderMaterial());
this.points.frustumCulled = false; // prevent mesh click on drag
this.scene.add(this.points);
}
World.prototype.addEdges = function() {
var indices = [],
positions = [],
idToIndex = {}, // {node id: index in edgePositions}
ids = Object.keys(this.edges);
// flatten edges into [[s,t],[s,t]]
for (var i=0; i<ids.length; i++) {
var idEdges = this.edges[ids[i]];
for (var j=0; j<idEdges.length; j++) {
// here ids[i] is a master node id, idEdges is list of
// apprentice node ids
var masterId = ids[i];
var apprenticeId = idEdges[j];
if (!(masterId in idToIndex)) {
idToIndex[masterId] = positions.length;
positions.push(this.positions[masterId]);
}
if (!(apprenticeId in idToIndex)) {
idToIndex[apprenticeId] = positions.length;
positions.push(this.positions[apprenticeId]);
}
indices = indices.concat([
idToIndex[masterId],
idToIndex[apprenticeId]
]);
}
}
var geometry = new THREE.BufferGeometry(),
translations = new Float32Array(3*positions.length),
iter = 0,
indices = new Uint16Array(indices);
for (var i=0; i<positions.length; i++) {
var e = positions[i];
translations[iter++] = e[0];
translations[iter++] = e[1];
translations[iter++] = this.z;
}
var material = new THREE.LineBasicMaterial({
color: 0xee6559,
opacity: 0.3,
transparent: true,
})
geometry.addAttribute('position',
new THREE.BufferAttribute(translations, 3, true, 1));
geometry.setIndex(new THREE.BufferAttribute(indices, 1, true, 1));
this.lines = new THREE.LineSegments(geometry, material);
this.lines.frustumCulled = false; // prevent mesh click on drag
this.scene.add(this.lines);
}
World.prototype.getShaderMaterial = function() {
return new THREE.RawShaderMaterial({
vertexShader: find('#vertex-shader').textContent,
fragmentShader: find('#fragment-shader').textContent,
uniforms: {
transitionPercent: { type: 'f', value: 0.0 },
pointScale: { type: 'f', value: this.getPointScale(), },
}
});
}
/**
* Helpers
**/
function get(url, handleSuccess, handleErr, handleProgress) {
var xmlhttp = new XMLHttpRequest();
xmlhttp.onreadystatechange = function() {
if (xmlhttp.readyState == XMLHttpRequest.DONE) {
if (xmlhttp.status === 200) {
if (handleSuccess) handleSuccess(xmlhttp.responseText)
} else {
if (handleErr) handleErr(xmlhttp)
}
};
};
xmlhttp.onprogress = function(e) {
if (handleProgress) handleProgress(e);
};
xmlhttp.open('GET', url, true);
xmlhttp.send();
};
function find(querySelector) {
return document.querySelector(querySelector);
}
window.addEventListener('resize', function() {
var size = world.getContainerSize();
world.camera.aspect = size.w / size.h;
world.camera.updateProjectionMatrix();
world.renderer.setSize(size.w, size.h);
})
function center(data) {
var ids = Object.keys(data);
// find the domains of each axis in the data
var p = Number.POSITIVE_INFINITY,
n = Number.NEGATIVE_INFINITY,
domains = {x: {min: p, max: n}, y: {min: p, max: n}};
for (var i=0; i<ids.length; i++) {
vals = data[ids[i]];
if (vals[0] < domains.x.min) domains.x.min = vals[0];
if (vals[0] > domains.x.max) domains.x.max = vals[0];
if (vals[1] < domains.y.min) domains.y.min = vals[1];
if (vals[1] > domains.y.max) domains.y.max = vals[1];
}
// center each axis 0:1
for (var i=0; i<ids.length; i++) {
var d = data[ids[i]];
d[0] = (d[0]-domains.x.min)/(domains.x.max-domains.x.min);
d[1] = (d[1]-domains.y.min)/(domains.y.max-domains.y.min);
}
return data;
}
function getPointTranslations(data) {
var ids = Object.keys(data);
var arr = new Float32Array(ids.length*3),
iter = 0;
for (var i=0; i<ids.length; i++) {
arr[iter++] = data[ids[i]][0];
arr[iter++] = data[ids[i]][1];
arr[iter++] = world.z;
}
return arr;
}
function getColors(data, masterCounts) {
var ids = Object.keys(data);
var maxMasters = 0;
for (var i=0; i<ids.length; i++) {
if (masterCounts[ids[i]] > maxMasters) maxMasters = masterCounts[ids[i]];
}
var colors = [
'#1f77b4', '#86abd7', '#cbcdd3', '#f8dba8',
'#eec570', '#eba055', '#ee6559',
];
var arr = new Float32Array(ids.length * 3),
iter = 0;
for (var i=0; i<ids.length; i++) {
var nMasters = Math.min(colors.length, masterCounts[ids[i]] || 0);
var hex = colors[ Math.floor(colors.length * (nMasters / maxMasters)) ];
var c = hexToRgb(hex);
arr[iter++] = c.r / 255;
arr[iter++] = c.g / 255;
arr[iter++] = c.b / 255;
}
return arr;
}
function componentToHex(c) {
var hex = c.toString(16);
return hex.length == 1 ? '0' + hex : hex;
}
function rgbToHex(r, g, b) {
return '#' + componentToHex(r) + componentToHex(g) + componentToHex(b);
}
function hexToRgb(hex) {
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result ? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16),
} : {r: 0, g: 0, b: 0};
}
var world = new World();
html,
body {
width: 100%;
height: 100%;
background: linear-gradient(#efefef, #efefef);
}
body {
margin: 0;
overflow: hidden;
}
div#select-target {
padding: 20px 0;
}
select {
-webkit-appearance: none;
-moz-appearance: none;
appearance: none;
font-family: 'Mallory';
text-indent: 0.01px;
text-overflow: '';
border: none;
padding: 7px 40px 7px 10px;
background-image: url(down-caret.png);
background-position: 90% 50%;
background-size: 15px;
background-repeat: no-repeat;
font-size: 1em;
text-transform: uppercase;
border: 1px solid #c7c7c7;
font-family: arial, sans-serif;
}
#render-container {
text-align: center;
max-height: 100%;
max-width: 100%;
padding-bottom: 20px;
}
#render-target {
margin: 0 auto;
width: 700px;
height: 700px;
}
<div id='render-container'>
<div id='select-target'></div>
<div id='render-target'></div>
</div>
<script src='https://s3.amazonaws.com/duhaime/blog/visualizations/line-segments-network/three.min.js'></script>
<script src='https://s3.amazonaws.com/duhaime/blog/visualizations/line-segments-network/trackball-controls.min.js'></script>
<script src='https://s3.amazonaws.com/duhaime/blog/visualizations/line-segments-network/tweenlite.min.js'></script>
<script type='x-shader/x-vertex' id='vertex-shader'>
precision highp float;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform float transitionPercent;
uniform vec3 cameraPosition;
uniform float pointScale;
attribute vec3 position;
attribute vec3 translation;
attribute vec3 target;
attribute vec3 color;
varying vec3 vColor;
float scalePointZ(in vec4 pos, in vec3 cameraPosition) {
float zDelta = pow(pos[2] - cameraPosition[2], 2.0);
float delta = pow(zDelta, 0.5);
float scaled = pointScale / delta;
return scaled;
}
void main() {
vec3 t1 = position + translation;
vec3 t2 = position + target;
vec3 pos = mix(t1, t2, clamp(transitionPercent, 0.0, 1.0));
vec4 mvPos = modelViewMatrix * vec4(pos, 1.0);
gl_Position = projectionMatrix * mvPos;
gl_PointSize = 6.0;
vColor = color;
}
</script>
<script type='x-shader/x-fragment' id='fragment-shader'>
precision highp float;
varying vec3 vColor;
void main() {
// make points circular
vec2 coord = gl_PointCoord - vec2(0.5);
if (length(coord) > 0.5) discard;
// set point color
gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);
}
</script>
However, I have been struggling to achieve the same scene using a RawShaderMaterial for the lines. Here's one of the ways I've tried to set up the lines for the raw shader material:
World.prototype.addEdges = function() {
var indices = [],
positions = [],
idToIndex = {}, // {node id: index in edgePositions}
ids = Object.keys(this.edges);
// flatten edges into [[s,t],[s,t]]
for (var i=0; i<ids.length; i++) {
var idEdges = this.edges[ids[i]];
for (var j=0; j<idEdges.length; j++) {
// here ids[i] is a master node id, idEdges is list of
// apprentice node ids
var masterId = ids[i];
var apprenticeId = idEdges[j];
if (!(masterId in idToIndex)) {
idToIndex[masterId] = positions.length;
positions.push(this.positions[masterId]);
}
if (!(apprenticeId in idToIndex)) {
idToIndex[apprenticeId] = positions.length;
positions.push(this.positions[apprenticeId]);
}
indices = indices.concat([
idToIndex[masterId],
idToIndex[apprenticeId]
]);
}
}
var geometry = new THREE.BufferGeometry(),
translations = new Float32Array(3*positions.length),
iter = 0,
indices = new Uint16Array(indices);
for (var i=0; i<positions.length; i++) {
var e = positions[i];
translations[iter++] = e[0];
translations[iter++] = e[1];
translations[iter++] = this.z;
}
var material = this.getShaderMaterial().clone();
geometry.addAttribute('position',
new THREE.BufferAttribute(new Float32Array([0, 0, 0]), 3, true, 1));
geometry.addAttribute('translation',
new THREE.BufferAttribute(translations, 3, true, 1));
geometry.addAttribute('target',
new THREE.BufferAttribute(translations, 3, true, 1));
geometry.setIndex(new THREE.BufferAttribute(indices, 1, true, 1));
this.lines = new THREE.LineSegments(geometry, material);
this.lines.frustumCulled = false; // prevent mesh click on drag
this.scene.add(this.lines);
}
However, this renders nothing. Does anyone know how I can render these lines with the raw shader material defined above? Any pointers or suggestions would be very helpful!
I think this happens because you are using gl_PointCoord in the fragment shader although you are not rendering points but lines. If I remove the following two lines of code, your lines are rendered:
vec2 coord = gl_PointCoord - vec2(0.5);
if (length(coord) > 0.5) discard;
Demo: https://jsfiddle.net/Ldynhxkq/
Maybe it's better to use different shader programs for both primitives.
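For example, something like this for the lines (a sketch only: getLineMaterial is a name I made up, and its vertex shader reads the regular position attribute, so you would pair it with the geometry layout from your working LineBasicMaterial version rather than the single-vertex position plus translation layout):
World.prototype.getLineMaterial = function() {
  return new THREE.RawShaderMaterial({
    vertexShader: `
      precision highp float;
      uniform mat4 modelViewMatrix;
      uniform mat4 projectionMatrix;
      attribute vec3 position;
      void main() {
        gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
      }
    `,
    fragmentShader: `
      precision highp float;
      void main() {
        // no gl_PointCoord here; that built-in is only defined when rendering points
        gl_FragColor = vec4(0.93, 0.4, 0.35, 1.0);
      }
    `,
    transparent: true,
  });
};
Then in addEdges you would create the mesh with this.lines = new THREE.LineSegments(geometry, this.getLineMaterial());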
I have lately been experimenting with and learning WebGL and shaders, as I am trying to build an animation for a website background. I was able to port simple examples from Shadertoy into three.js to play with them. Now I am trying to better understand more advanced examples, and I am struggling with this particular one:
https://www.shadertoy.com/view/4sfBWj
I am aware that to port a shader program to three.js you need to:
create a basic three.js setup with new THREE.PlaneGeometry()
create uniforms for iTime and iResolution
create vertex and fragment shader script tags
populate the fragment shader with the Shadertoy contents (Image section)
populate the vertex shader with a generic script
change the names to gl_FragColor and gl_FragCoord
change the name of the function to void main(void)
if a texture is used in one or more channels, then
load the texture with new THREE.TextureLoader() and create a uniform for iChannel0
Basic examples are good to go with the above. However, the one I linked has:
Buffer A
Buffer B
and both of these also include shader programs and main functions that run them. How do I deal with that to be able to port it to three.js?
My current progress:
var container;
var camera, scene0, scene1, scene2, renderer;
var uniforms0, uniforms1, uniforms2;
var startTime;
var renderTarget0, renderTarget1;
var clock = new THREE.Clock();
init();
animate();
function init() {
container = document.getElementById( 'container' );
startTime = Date.now();
camera = new THREE.Camera();
camera.position.z = 1;
scene0 = new THREE.Scene();
scene1 = new THREE.Scene();
scene2 = new THREE.Scene();
renderTarget0 = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight);
renderTarget1 = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight);
/* scene0 */
var geometry0 = new THREE.PlaneGeometry(700, 394, 1, 1);
uniforms0 = {
iTime: { type: "f", value: 1.0 },
iResolution: { type: "v1", value: new THREE.Vector2(), }
};
var material0 = new THREE.ShaderMaterial( {
uniforms: uniforms0,
vertexShader: document.getElementById( 'vs0' ).textContent,
fragmentShader: document.getElementById( 'fs0' ).textContent
});
/* scene0 */
var mesh0 = new THREE.Mesh( geometry0, material0 );
/* scene1 */
var geometry1 = new THREE.PlaneGeometry(700, 394, 1, 1);
uniforms1 = {
iTime: { type: "f", value: 1.0 },
iResolution: { type: "v1", value: new THREE.Vector2(), }
};
var material1 = new THREE.ShaderMaterial( {
uniforms: uniforms1,
vertexShader: document.getElementById( 'vs1' ).textContent,
fragmentShader: document.getElementById( 'fs1' ).textContent,
iChannel0: {type: 't', value: renderTarget0 }
});
var mesh1 = new THREE.Mesh( geometry1, material1 );
/* scene1 */
/* scene2 */
var geometry2 = new THREE.PlaneGeometry(700, 394, 1, 1);
uniforms2 = {
iTime: { type: "f", value: 1.0 },
iResolution: { type: "v1", value: new THREE.Vector2(), }
};
var material2 = new THREE.ShaderMaterial( {
uniforms: uniforms1,
vertexShader: document.getElementById( 'vs2' ).textContent,
fragmentShader: document.getElementById( 'fs2' ).textContent,
iChannel0: {type: 't', value: renderTarget0 },
iChannel1: {type: 't', value: renderTarget1 }
});
var mesh2 = new THREE.Mesh( geometry2, material2 );
/* scene2 */
scene0.add( mesh0 );
scene1.add( mesh1 );
scene2.add( mesh2 );
renderer = new THREE.WebGLRenderer();
container.appendChild( renderer.domElement );
onWindowResize();
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize( event ) {
uniforms0.iResolution.value.x = window.innerWidth;
uniforms0.iResolution.value.y = window.innerHeight;
uniforms1.iResolution.value.x = window.innerWidth;
uniforms1.iResolution.value.y = window.innerHeight;
uniforms2.iResolution.value.x = window.innerWidth;
uniforms2.iResolution.value.y = window.innerHeight;
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
//renderer.render(scene0, camera, renderTarget0);
//renderer.render(scene1, camera, renderTarget1);
uniforms1.iChannel0.value = rendertarget0.texture;
uniforms2.iChannel0.value = rendertarget0.texture;
uniforms2.iChannel1.value = rendertarget1.texture;
uniforms0.iTime.value += clock.getDelta();
uniforms1.iTime.value += clock.getDelta();
uniforms2.iTime.value += clock.getDelta();
//renderer.render( scene2, camera );
}
body {
margin:0;
padding:0;
overflow:hidden;
}
<script src="//threejs.org/build/three.min.js"></script>
<div id="container"></div>
<!-- BREAK --->
<script id="vs0" type="x-shader/x-vertex">
void main() {
vec4 mvPosition = modelViewMatrix * vec4(position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script id="fs0" type="x-shader/x-fragment">
uniform vec2 iResolution;
uniform float iTime;
const mat2 m = mat2( 0.8, 0.6, -0.6, 0.8 );
const mat3 m3 = mat3( 0.8, 0.6, 0.0, -0.6, 0.80, 0.0, 0.0, 0.0, 1.0) *
mat3( 1.0, 0.0, 0.0, 0.0, -0.60, 0.80, 0.0, 0.8, 0.6) *
mat3( 0.8, 0.6, 0.0, -0.6, 0.80, 0.0, 0.0, 0.0, 1.0) *
mat3( 1.0, 0.0, 0.0, 0.0, -0.60, 0.80, 0.0, 0.8, 0.6);
float time;
float n1f0(float p) {
return fract(sin(p * 1.7227636) * 8.03e2);
}
float n1f1(float p) {
return fract(sin(p * 1.42736 + 1.12) * 5.1e2);
}
float n1f2(float p) {
return fract(sin(p * 1.22712 + 12.161) * 5.2e2);
}
float n3f(vec3 p) {
return fract(n1f0(p.x) + n1f1(p.y) + n1f2(p.z) + n1f0(p.x * 1.613) + n1f1(p.y * 3.112) + n1f2(p.z * 4.112));
}
float n3(vec3 p) {
vec3 b = floor(p);
vec3 e = b + vec3(1.0);
vec3 f = smoothstep(vec3(0.0), vec3(1.0), fract(p));
float c000 = n3f(b);
float c001 = n3f(vec3(b.x, b.y, e.z));
float c010 = n3f(vec3(b.x, e.y, b.z));
float c011 = n3f(vec3(b.x, e.y, e.z));
float c100 = n3f(vec3(e.x, b.y, b.z));
float c101 = n3f(vec3(e.x, b.y, e.z));
float c110 = n3f(vec3(e.x, e.y, b.z));
float c111 = n3f(e);
vec4 z = mix(vec4(c000, c100, c010, c110), vec4(c001, c101, c011, c111), f.z);
vec2 yz = mix(z.xy, z.zw, f.y);
return mix(yz.x, yz.y, f.x);
}
float fbm4( vec3 p )
{
float f = 0.0;
p = m3 * p;
f += 0.5000*n3( p ); p = m3*p*2.02;
f += 0.2500*n3( p ); p = m3*p*2.03;
f += 0.1250*n3( p ); p = m3*p*2.01;
f += 0.0625*n3( p );
return f/0.9375;
}
float fbm4( vec2 p )
{
return fbm4(vec3(p, time));
}
float fbm6( vec3 p )
{
float f = 0.0;
p = m3 * p;
f += 0.500000*n3( p ); p = m3*p*2.02;
f += 0.250000*n3( p ); p = m3*p*2.03;
f += 0.125000*n3( p ); p = m3*p*2.01;
f += 0.062500*n3( p ); p = m3*p*2.04;
f += 0.031250*n3( p ); p = m3*p*2.01;
f += 0.015625*n3( p );
return f/0.984375;
}
float fbm6( vec2 p )
{
return fbm6(vec3(p, time));
}
float grid(vec2 p) {
p = sin(p * 3.1415);
return smoothstep(-0.01, 0.01, p.x * p.y);
}
void main(void) {
time = iTime * 0.7;
vec2 q = gl_FragCoord.xy / iResolution.xy;
vec2 p = -1.0 + 2.0 * q;
p.x *= iResolution.x/iResolution.y;
p.y *= 0.3;
p.y -= time * 1.5;
float tc = time * 1.2;
float tw1 = time * 2.5;
float tw2 = time * 0.6;
vec3 vw1 = vec3(p, tw1);
vw1.y *= 2.8;
vec2 ofs1 = vec2(fbm4(vw1), fbm4(vw1 + vec3(10.0, 20.0, 50.0)));
ofs1.y *= 0.3;
ofs1.x *= 1.3;
vec3 vw2 = vec3(p, tw2);
vw2.y *= 0.8;
vec2 ofs2 = vec2(fbm4(vw2), fbm4(vw2 + vec3(10.0, 20.0, 50.0)));
ofs2.y *= 0.3;
ofs2.x *= 1.3;
vec2 vs = (p + ofs1 * 0.5 + ofs2 * 0.9) * 4.0;
vec3 vc = vec3(vs, tc);
float l;
l = fbm6(vc);
l = smoothstep(0.0, 1.0, l);
l = max(0.0, (l - pow(q.y * 0.8, 0.6)) * 1.8);
float r = pow(l , 1.5);
float g = pow(l , 3.0);
float b = pow(l , 6.0);
//r = grid(vs);
gl_FragColor = vec4( r, g, b, 1.0 );
}
</script>
<!-- BREAK --->
<script id="vs1" type="x-shader/x-vertex">
void main() {
vec4 mvPosition = modelViewMatrix * vec4(position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script id="fs1" type="x-shader/x-fragment">
uniform vec2 iResolution;
uniform float iTime;
uniform sampler2D iChannel0;
#ifdef GL_ES
precision mediump float;
#endif
#define SIGMA 5.0
float normpdf(in float x, in float sigma)
{
return 0.39894*exp(-0.5*x*x/(sigma*sigma))/sigma;
}
void main(void) {
vec3 c = texture2D(iChannel0, gl_FragCoord.xy / iResolution.xy).rgb;
//declare stuff
const int mSize = int(SIGMA * 11.0/7.0);
const int kSize = (mSize-1)/2;
float kernel[mSize];
vec3 finalColor = vec3(0.0);
//create the 1-D kernel
float sigma = SIGMA;
float Z = 0.0;
for (int j = 0; j <= kSize; ++j)
{
kernel[kSize+j] = kernel[kSize-j] = normpdf(float(j), sigma);
}
//get the normalization factor (as the gaussian has been clamped)
for (int j = 0; j < mSize; ++j)
{
Z += kernel[j];
}
//read out the texels
for (int i=-kSize; i <= kSize; ++i)
{
for (int j=-kSize; j <= kSize; ++j)
{
finalColor += kernel[kSize+j]*kernel[kSize+i]*texture2D(iChannel0, (gl_FragCoord.xy+vec2(float(i),float(j))) / iResolution.xy).rgb;
}
}
finalColor /= Z*Z;
//finalColor = c + finalColor * 0.3;
gl_FragColor = vec4(finalColor, 1.0);
}
</script>
<!-- BREAK --->
<script id="vs2" type="x-shader/x-vertex">
void main() {
vec4 mvPosition = modelViewMatrix * vec4(position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script id="fs2" type="x-shader/x-fragment">
uniform vec2 iResolution;
uniform float iTime;
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
#ifdef GL_ES
precision mediump float;
#endif
#define SIGMA 5.0
float normpdf(in float x, in float sigma)
{
return 0.39894*exp(-0.5*x*x/(sigma*sigma))/sigma;
}
void main(void) {
vec3 c = texture2D(iChannel0, gl_FragCoord.xy / iResolution.xy).rgb;
//gl_FragColor = vec4(c, 1.0);
//return;
//declare stuff
const int mSize = int(SIGMA * 11.0/7.0);
const int kSize = (mSize-1)/2;
float kernel[mSize];
vec3 finalColor = vec3(0.0);
//create the 1-D kernel
float sigma = SIGMA;
float Z = 0.0;
for (int j = 0; j <= kSize; ++j)
{
kernel[kSize+j] = kernel[kSize-j] = normpdf(float(j), sigma);
}
//get the normalization factor (as the gaussian has been clamped)
for (int j = 0; j < mSize; ++j)
{
Z += kernel[j];
}
//read out the texels
for (int i=-kSize; i <= kSize; ++i)
{
for (int j=-kSize; j <= kSize; ++j)
{
finalColor += kernel[kSize+j]*kernel[kSize+i]*texture2D(iChannel1, (gl_FragCoord.xy+vec2(float(i),float(j))) / iResolution.xy).rgb;
}
}
finalColor /= Z*Z;
finalColor = c + pow(finalColor, vec3(0.5)) * 0.5;
gl_FragColor = vec4(finalColor, 1.0);
}
</script>
This example uses multiple renders per frame. It works like this:
render shaderA to buffer
pass output to shaderB
render shaderB to buffer
pass output to shaderC
render shaderC to canvas
To replicate this in Three.js, you'll need a WebGLRenderTarget as an intermediary to pass the output of one render as a texture to the next shader. Here's pseudocode with only 2 renders; you'll need to extend it if you need more:
var renderer = new WebGLRenderer(w, h, ...);
var scene0 = new Scene();
var scene1 = new Scene();
var plane0 = new THREE.PlaneBufferGeometry(1, 1);
var plane1 = new THREE.PlaneBufferGeometry(1, 1);
// ... continue building materials, shaders, etc
// Add plane mesh to its corresponding scene
scene0.add(planeMesh0);
scene1.add(planeMesh1);
// You should only need one camera if they're both in the same position.
var cam = new Camera();
// renderTarget will store the first buffer
var renderTarget = new WebGLRenderTarget(w, h);
update() {
// This pass will render the first shader
// Its output will be drawn on the rendertarget's texture
renderer.render(scene0, cam, renderTarget);
// We assign the output of the first render pass to the second plane's material
planeMesh1.material.uniforms.texture.value = renderTarget.texture;
// Now we render the second shader to the canvas
renderer.render(scene1, cam);
}
Keep in mind you have to render separate scenes in each pass to avoid recursion issues, so you'll have to add each plane to a separate scene. To learn more about WebGLRenderTarget, you can read about it in the docs.
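Note that the three-argument render(scene, camera, target) call in the pseudocode matches older three.js releases; in current versions you select the target with renderer.setRenderTarget() before rendering. Applied to the setup in the question, and assuming iChannel0/iChannel1 are declared inside the uniforms1/uniforms2 objects (in the posted code they are passed as top-level material options, where they never reach the shader), the render loop might look roughly like this:
function render() {
  // Pass 1: Buffer A (scene0) into renderTarget0
  renderer.setRenderTarget(renderTarget0);
  renderer.render(scene0, camera);
  // Pass 2: Buffer B (scene1) reads renderTarget0 and renders into renderTarget1
  uniforms1.iChannel0.value = renderTarget0.texture;
  renderer.setRenderTarget(renderTarget1);
  renderer.render(scene1, camera);
  // Pass 3: the final image (scene2) reads both buffers and renders to the canvas
  uniforms2.iChannel0.value = renderTarget0.texture;
  uniforms2.iChannel1.value = renderTarget1.texture;
  renderer.setRenderTarget(null);
  renderer.render(scene2, camera);
  // call getDelta() once per frame and share the value
  const delta = clock.getDelta();
  uniforms0.iTime.value += delta;
  uniforms1.iTime.value += delta;
  uniforms2.iTime.value += delta;
}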
/**
* A class creating buffers for a textured box to render it with WebGL
*/
class RasterTextureBox {
/**
* Creates all WebGL buffers for the textured box
* 6 ------- 7
* / | / |
* 3 ------- 2 |
* | | | |
* | 5 -----|- 4
* | / | /
* 0 ------- 1
* looking in negative z axis direction
* @param {WebGLContext} gl - The canvas' context
* @param {Vector} minPoint - The minimal x,y,z of the box
* @param {Vector} maxPoint - The maximal x,y,z of the box
*/
constructor(gl, minPoint, maxPoint, texture) {
this.gl = gl;
const mi = minPoint;
const ma = maxPoint;
let vertices = [
// front
mi.x, mi.y, ma.z, ma.x, mi.y, ma.z, ma.x, ma.y, ma.z,
ma.x, ma.y, ma.z, mi.x, ma.y, ma.z, mi.x, mi.y, ma.z,
// back
ma.x, mi.y, mi.z, mi.x, mi.y, mi.z, mi.x, ma.y, mi.z,
mi.x, ma.y, mi.z, ma.x, ma.y, mi.z, ma.x, mi.y, mi.z,
// right
ma.x, mi.y, ma.z, ma.x, mi.y, mi.z, ma.x, ma.y, mi.z,
ma.x, ma.y, mi.z, ma.x, ma.y, ma.z, ma.x, mi.y, ma.z,
// top
mi.x, ma.y, ma.z, ma.x, ma.y, ma.z, ma.x, ma.y, mi.z,
ma.x, ma.y, mi.z, mi.x, ma.y, mi.z, mi.x, ma.y, ma.z,
// left
mi.x, mi.y, mi.z, mi.x, mi.y, ma.z, mi.x, ma.y, ma.z,
mi.x, ma.y, ma.z, mi.x, ma.y, mi.z, mi.x, mi.y, mi.z,
// bottom
mi.x, mi.y, mi.z, ma.x, mi.y, mi.z, ma.x, mi.y, ma.z,
ma.x, mi.y, ma.z, mi.x, mi.y, ma.z, mi.x, mi.y, mi.z
];
const vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
this.vertexBuffer = vertexBuffer;
this.elements = vertices.length / 3;
let cubeTexture = gl.createTexture();
let cubeImage = new Image();
cubeImage.onload = function () {
gl.bindTexture(gl.TEXTURE_2D, cubeTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, cubeImage);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
}
cubeImage.src = texture;
this.texBuffer = cubeTexture;
let uv = [
// front
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// back
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// right
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// top
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// left
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// bottom
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
];
let uvBuffer = this.gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uvBuffer);
gl.bufferData(this.gl.ARRAY_BUFFER, new Float32Array(uv),
gl.STATIC_DRAW);
this.texCoords = uvBuffer;
}
render(shader) {
this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.vertexBuffer);
const positionLocation = shader.getAttributeLocation("a_position");
this.gl.enableVertexAttribArray(positionLocation);
this.gl.vertexAttribPointer(positionLocation, 3, this.gl.FLOAT, false, 0, 0);
// Bind the texture coordinates in this.texCoords
// to their attribute in the shader
this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.texCoords);
const texCoordLocation = shader.getAttributeLocation("a_texCoord");
this.gl.enableVertexAttribArray(texCoordLocation);
this.gl.vertexAttribPointer(texCoordLocation, 2, this.gl.FLOAT, false, 0, 0);
this.gl.activeTexture(this.gl.TEXTURE0);
this.gl.bindTexture(this.gl.TEXTURE_2D, this.texBuffer);
shader.getUniformInt("sampler").set(0);
this.gl.drawArrays(this.gl.TRIANGLES, 0, this.elements);
this.gl.disableVertexAttribArray(positionLocation);
//disable texture vertex attrib array
this.gl.disableVertexAttribArray(texCoordLocation);
}
}
/**
* Class representing a 4x4 Matrix
*/
class Matrix {
constructor(mat) {
this.data = new Float32Array(16);
for (let row = 0; row < 4; row++) {
for (let col = 0; col < 4; col++) {
this.data[row * 4 + col] = mat[col * 4 + row];
}
}
}
getVal(row, col) {
return this.data[col * 4 + row];
}
setVal(row, col, val) {
this.data[col * 4 + row] = val;
}
static translation(translation) {
let m = Matrix.identity();
m.setVal(0, 3, translation.x);
m.setVal(1, 3, translation.y);
m.setVal(2, 3, translation.z);
return m;
}
static rotation(axis, angle) {
let m = Matrix.identity()
let sin = Math.sin(angle);
let cos = Math.cos(angle);
if (axis.x != 0) {
m.setVal(1, 1, cos);
m.setVal(1, 2, -sin);
m.setVal(2, 1, sin);
m.setVal(2, 2, cos);
} else if (axis.y != 0) {
m.setVal(0, 0, cos);
m.setVal(0, 2, sin);
m.setVal(2, 0, -sin);
m.setVal(2, 2, cos);
} else {
m.setVal(0, 0, cos);
m.setVal(0, 1, -sin);
m.setVal(1, 0, sin);
m.setVal(1, 1, cos);
}
return m;
}
static scaling(scale) {
let m = Matrix.identity();
m.setVal(0, 0, scale.x);
m.setVal(1, 1, scale.y);
m.setVal(2, 2, scale.z);
return m;
}
/**
* Constructs a lookat matrix
* @param {Vector} eye - The position of the viewer
* @param {Vector} center - The position to look at
* @param {Vector} up - The up direction
* @return {Matrix} The resulting lookat matrix
*/
static lookat(eye, center, up) {
let fBig = center.sub(eye);
// vector from the eye to the center point
let f = fBig.normalised();
// up vector
let upNorm = up.normalised();
// cross product
let s = f.cross(upNorm);
let u = s.normalised().cross(f);
// s, u and f are the axes of the camera coordinate system
// lookat matrix; the 3x3 part handles rotation and scaling
let mat = new Matrix([
s.x, s.y, s.z, 0,
u.x, u.y, u.z, 0,
-f.x, -f.y, -f.z, 0,
0, 0, 0, 1
]);
// remaining step: translation by the negated eye position
let trans = Matrix.translation(eye.mul(-1));
mat = mat.mul(trans);
return mat;
}
static frustum(left, right, bottom, top, near, far) {
// TODO [exercise 9]
const n2 = 2 * near;
const rpl = right + left;
const rml = right - left;
const tpb = top + bottom;
const tmb = top - bottom;
const fpn = far + near;
const fmn = far - near;
const n2f = n2 * far;
return new Matrix([
n2 / rml, 0, rpl / rml, 0,
0, n2 / tmb, tpb / tmb, 0,
0, 0, -fpn / fmn, -n2f / fmn,
0, 0, -1, 0
]);
}
static perspective(fovy, aspect, near, far) {
// use the frustum method (lecture slide set 10)
const top = near * Math.tan((Math.PI / 180) * (fovy / 2));
const bottom = -top;
const right = top * aspect;
const left = -right;
return Matrix.frustum(left, right, bottom, top, near, far);
}
/**
* Returns the identity matrix
*/
static identity() {
return new Matrix([
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
]);
}
mul(other) {
if (other instanceof Matrix) {
// [exercise 7]
let m = Matrix.identity();
for (let row = 0; row < 4; row++) {
for (let col = 0; col < 4; col++) {
let sum = 0;
for (let i = 0; i < 4; i++) {
sum += this.getVal(row, i) * other.getVal(i, col);
}
m.setVal(row, col, sum);
}
}
return m;
} else {
let v = [0, 0, 0, 0];
for (let row = 0; row < 4; row++) {
for (let i = 0; i < 4; i++) {
v[row] += this.getVal(row, i) * other.valueOf()[i];
}
}
return new Vector(v[0], v[1], v[2], v[3]);
}
}
transpose() {
let m = Matrix.identity();
for (let row = 0; row < 4; row++) {
for (let col = 0; col < 4; col++) {
m.setVal(row, col, this.getVal(col, row));
}
}
return m;
}
invert() {
let mat = this.data;
let dst = new Float32Array(16); //ret.getValues();
let tmp = new Float32Array(12);
/* temparray for pairs */
let src = new Float32Array(16); //new float[16];
/* array of transpose source matrix */
let det;
for (let i = 0; i < 4; i++) {
src[i] = mat[i * 4];
src[i + 4] = mat[i * 4 + 1];
src[i + 8] = mat[i * 4 + 2];
src[i + 12] = mat[i * 4 + 3];
}
tmp[0] = src[10] * src[15];
tmp[1] = src[11] * src[14];
tmp[2] = src[9] * src[15];
tmp[3] = src[11] * src[13];
tmp[4] = src[9] * src[14];
tmp[5] = src[10] * src[13];
tmp[6] = src[8] * src[15];
tmp[7] = src[11] * src[12];
tmp[8] = src[8] * src[14];
tmp[9] = src[10] * src[12];
tmp[10] = src[8] * src[13];
tmp[11] = src[9] * src[12];
dst[0] = tmp[0] * src[5] + tmp[3] * src[6] + tmp[4] * src[7];
dst[0] -= tmp[1] * src[5] + tmp[2] * src[6] + tmp[5] * src[7];
dst[1] = tmp[1] * src[4] + tmp[6] * src[6] + tmp[9] * src[7];
dst[1] -= tmp[0] * src[4] + tmp[7] * src[6] + tmp[8] * src[7];
dst[2] = tmp[2] * src[4] + tmp[7] * src[5] + tmp[10] * src[7];
dst[2] -= tmp[3] * src[4] + tmp[6] * src[5] + tmp[11] * src[7];
dst[3] = tmp[5] * src[4] + tmp[8] * src[5] + tmp[11] * src[6];
dst[3] -= tmp[4] * src[4] + tmp[9] * src[5] + tmp[10] * src[6];
dst[4] = tmp[1] * src[1] + tmp[2] * src[2] + tmp[5] * src[3];
dst[4] -= tmp[0] * src[1] + tmp[3] * src[2] + tmp[4] * src[3];
dst[5] = tmp[0] * src[0] + tmp[7] * src[2] + tmp[8] * src[3];
dst[5] -= tmp[1] * src[0] + tmp[6] * src[2] + tmp[9] * src[3];
dst[6] = tmp[3] * src[0] + tmp[6] * src[1] + tmp[11] * src[3];
dst[6] -= tmp[2] * src[0] + tmp[7] * src[1] + tmp[10] * src[3];
dst[7] = tmp[4] * src[0] + tmp[9] * src[1] + tmp[10] * src[2];
dst[7] -= tmp[5] * src[0] + tmp[8] * src[1] + tmp[11] * src[2];
tmp[0] = src[2] * src[7];
tmp[1] = src[3] * src[6];
tmp[2] = src[1] * src[7];
tmp[3] = src[3] * src[5];
tmp[4] = src[1] * src[6];
tmp[5] = src[2] * src[5];
tmp[6] = src[0] * src[7];
tmp[7] = src[3] * src[4];
tmp[8] = src[0] * src[6];
tmp[9] = src[2] * src[4];
tmp[10] = src[0] * src[5];
tmp[11] = src[1] * src[4];
dst[8] = tmp[0] * src[13] + tmp[3] * src[14] + tmp[4] * src[15];
dst[8] -= tmp[1] * src[13] + tmp[2] * src[14] + tmp[5] * src[15];
dst[9] = tmp[1] * src[12] + tmp[6] * src[14] + tmp[9] * src[15];
dst[9] -= tmp[0] * src[12] + tmp[7] * src[14] + tmp[8] * src[15];
dst[10] = tmp[2] * src[12] + tmp[7] * src[13] + tmp[10] * src[15];
dst[10] -= tmp[3] * src[12] + tmp[6] * src[13] + tmp[11] * src[15];
dst[11] = tmp[5] * src[12] + tmp[8] * src[13] + tmp[11] * src[14];
dst[11] -= tmp[4] * src[12] + tmp[9] * src[13] + tmp[10] * src[14];
dst[12] = tmp[2] * src[10] + tmp[5] * src[11] + tmp[1] * src[9];
dst[12] -= tmp[4] * src[11] + tmp[0] * src[9] + tmp[3] * src[10];
dst[13] = tmp[8] * src[11] + tmp[0] * src[8] + tmp[7] * src[10];
dst[13] -= tmp[6] * src[10] + tmp[9] * src[11] + tmp[1] * src[8];
dst[14] = tmp[6] * src[9] + tmp[11] * src[11] + tmp[3] * src[8];
dst[14] -= tmp[10] * src[11] + tmp[2] * src[8] + tmp[7] * src[9];
dst[15] = tmp[10] * src[10] + tmp[4] * src[8] + tmp[9] * src[9];
dst[15] -= tmp[8] * src[9] + tmp[11] * src[10] + tmp[5] * src[8];
det = src[0] * dst[0] + src[1] * dst[1] + src[2] * dst[2] + src[3] * dst[3];
if (det == 0.0) {
throw new Error("singular matrix is not invertible");
}
/* calculate matrix inverse */
det = 1 / det;
for (let j = 0; j < 16; j++) {
dst[j] *= det;
}
let ret = Matrix.identity();
ret.data = dst;
return ret;
}
}
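// Usage sketch (an illustration added here, not part of the original files):
// the constructor takes the 16 values in row-major order and stores them
// transposed in this.data, i.e. column-major, which is the layout
// gl.uniformMatrix4fv expects. For example:
//   const m = Matrix.translation(new Vector(1, 2, 3, 1));
//   m.getVal(0, 3);                 // 1 (translation sits in the last column)
//   m.mul(new Vector(0, 0, 0, 1));  // Vector(1, 2, 3, 1)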
/**
* Class representing a vector in 4D space
*/
class Vector {
/**
* Create a vector
* @param {number} x - The x component
* @param {number} y - The y component
* @param {number} z - The z component
* @param {number} w - The w component
* @return {Vector} The resulting vector
*/
constructor(x, y, z, w) {
this.data = [x, y, z, w];
}
//has getter and setter
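// A minimal sketch of those accessors (they are not shown in the original post);
// the components simply map onto this.data[0..3]:
get x() { return this.data[0]; }
set x(val) { this.data[0] = val; }
get y() { return this.data[1]; }
set y(val) { this.data[1] = val; }
get z() { return this.data[2]; }
set z(val) { this.data[2] = val; }
get w() { return this.data[3]; }
set w(val) { this.data[3] = val; }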
add(other) {
return new Vector(
this.x + other.x,
this.y + other.y,
this.z + other.z,
this.w + other.w
);
}
sub(other) {
return new Vector(
this.x - other.x,
this.y - other.y,
this.z - other.z,
this.w - other.w
);
}
mul(other) {
return new Vector(
this.x * other,
this.y * other,
this.z * other,
this.w
);
}
div(other) {
return new Vector(
this.x / other,
this.y / other,
this.z / other,
this.w
);
}
dot(other) {
if (other instanceof Vector) {
return this.x * other.x + this.y * other.y + this.z * other.z;
} else {
throw new Error("Dot product only works with vectors!");
}
}
cross(other) {
if (other instanceof Vector) {
return new Vector(
this.y * other.z - this.z * other.y,
this.z * other.x - this.x * other.z,
this.x * other.y - this.y * other.x,
0
);
} else {
throw new Error("Dot product only works with vectors!");
}
}
valueOf() {
return this.data;
}
normalised() {
const l = this.length;
return this.div(l);
}
equals(other) {
return (
Math.abs(this.x - other.x) <= Number.EPSILON &&
Math.abs(this.y - other.y) <= Number.EPSILON &&
Math.abs(this.z - other.z) <= Number.EPSILON &&
((!this.w && !other.w) || Math.abs(this.w - other.w) <= Number.EPSILON)
);
}
get length() {
return Math.sqrt(this.x * this.x + this.y * this.y + this.z * this.z);
}
}
/**
* Class representing a Node in a Scenegraph
*/
class Node {
/**
* Accepts a visitor according to the visitor pattern
* @param {Visitor} visitor - The visitor
*/
accept(visitor) { }
}
/**
* Class representing a GroupNode in the Scenegraph.
* A GroupNode holds a transformation and is able
* to have child nodes attached to it.
* @extends Node
*/
class GroupNode extends Node {
/**
* Constructor
* @param {Matrix} mat - A matrix describing the node's transformation
*/
constructor(mat) {
super();
this.matrix = mat;
// TODO [exercise 8]
this.children = [];
}
/**
* Accepts a visitor according to the visitor pattern
* @param {Visitor} visitor - The visitor
*/
accept(visitor) {
// TODO [exercise 8]
visitor.visitGroupNode(this);
}
/**
* Adds a child node
* @param {Node} childNode - The child node to add
*/
add(childNode) {
// TODO [exercise 8]
this.children.push(childNode);
}
}
/**
* Class representing a Textured Axis Aligned Box in the Scenegraph
* @extends Node
*/
class TextureBoxNode extends Node {
constructor(minPoint, maxPoint, texture) {
super();
this.minPoint = minPoint;
this.maxPoint = maxPoint;
this.texture = texture;
}
accept(visitor) {
// TODO [exercise 8]
visitor.visitTextureBoxNode(this);
}
}
//Texture Fragment Shader
precision mediump float;
uniform sampler2D sampler;
varying vec2 v_texCoord;
void main( void ) {
//gl_FragColor = vec4( 0.0, 0.0, 0.5, 1.0 );
// Read fragment color from texture
// TODO [exercise 9]
gl_FragColor = texture2D(sampler, vec2(v_texCoord.s, v_texCoord.t));
}
//Texture Vertex Shader
attribute vec3 a_position;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
uniform mat4 M;
uniform mat4 V;
uniform mat4 P;
void main() {
gl_Position = P * V * M * vec4( a_position, 1.0 );
v_texCoord = a_texCoord;
}
// Phong Vertex Shader
attribute vec3 a_position;
attribute vec3 a_normal;
// Pass color as attribute and forward it
// to the fragment shader
attribute vec4 a_color;
uniform mat4 M;
uniform mat4 V;
uniform mat4 P;
uniform mat4 N; // normal matrix
varying vec3 v_normal;
// Pass the vertex position in view space
// to the fragment shader
// TODO [exercise 9]
varying vec4 v_position;
varying vec4 v_color;
void main() {
gl_Position = P * V * M * vec4( a_position, 1.0 );
// Pass the color and transformed vertex position through
v_position = gl_Position;
v_color = a_color;
v_normal = (N * vec4(a_normal, 0)).xyz;
}
//Phong Fragment Shader
//precision mediump float;
// TODO [exercise 5]
//void main( void ) {
//gl_FragColor = vec4( 0.0, 0.0, 0.5, 1.0 );
// TODO [exercise 5]
//}
// executed at least once per pixel
precision mediump float;
// TODO [exercise 5]
varying vec4 v_color;
varying vec4 v_position;
varying vec3 v_normal;
const vec3 lightPos = vec3(0.2,-1.0,-1.0);
const float shininess = 16.0;
const float k_a = 1.0;
const float k_d = 0.6;
const float k_s = 0.3;
// pass the color through from the vertex shader and interpolate it
void main( void ) {
// red, green, blue, alpha
//gl_FragColor = vec4( 0.0, 0.0, 0.5, 1.0 );
// TODO [exercise 5]
vec3 vertPos = vec3(v_position) / v_position.w;
vec3 N = normalize(v_normal);
vec3 L = normalize(lightPos - vertPos);
vec4 L_j = vec4(1,1,1,1);
vec4 diffuse = L_j * max(dot(N, L), 0.0);
vec3 R = reflect(-L, N);
vec3 V = normalize(-vertPos);
float specAngle = max(dot(R, V), 0.0);
vec4 specular = L_j * pow(specAngle, shininess);
vec4 color = vec4(k_a * v_color + k_d * diffuse + k_s * specular);
gl_FragColor = color;
}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>ICG-11 Animation</title>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u"
crossorigin="anonymous">
</head>
<body>
<div class="container text-center">
<h1>ICG Animation</h1>
<hr>
<p>Implement a Rasteriser with WebGL using a Scenegraph.</p>
<canvas id="rasteriser" width="500" height="500"></canvas>
<script src="vector.js"></script>
<script src="raster-texture-box.js"></script>
<script src="matrix.js"></script>
<script src="nodes.js"></script>
<script src="rastervisitor.js"></script>
<script src="shader.js"></script>
<script src="animation-nodes.js"></script>
<script>
const canvas = document.getElementById("rasteriser");
const gl = canvas.getContext("webgl");
// construct scene graph
const sg = new GroupNode(Matrix.scaling(new Vector(0.2, 0.2, 0.2)));
const gn1 = new GroupNode(Matrix.translation(new Vector(1, 1, 0)));
sg.add(gn1);
let gn2 = new GroupNode(Matrix.translation(new Vector(-.7, -0.4, .1)));
sg.add(gn2);
const cube = new TextureBoxNode(
new Vector(-1, -1, -1, 1),
new Vector(1, 1, 1, 1),
'diffuse.png'
);
gn2.add(cube);
// setup for rendering
const setupVisitor = new RasterSetupVisitor(gl);
setupVisitor.setup(sg);
const visitor = new RasterVisitor(gl);
let camera = {
eye: new Vector(-.5, .5, -1, 1),
center: new Vector(0, 0, 0, 1),
up: new Vector(0, 1, 0, 0),
fovy: 60,
aspect: canvas.width / canvas.height,
near: 0.1,
far: 100
};
const phongShader = new Shader(gl,
"phong-vertex-perspective-shader.glsl",
"phong-fragment-shader.glsl"
);
visitor.shader = phongShader;
const textureShader = new Shader(gl,
"texture-vertex-perspective-shader.glsl",
"texture-fragment-shader.glsl"
);
visitor.textureshader = textureShader;
let animationNodes = [
new RotationNode(gn2, new Vector(0, 0, 1))
];
function simulate(deltaT) {
for (const animationNode of animationNodes) {
animationNode.simulate(deltaT);
}
}
let lastTimestamp = performance.now();
function animate(timestamp) {
simulate(timestamp - lastTimestamp);
visitor.render(sg, camera);
lastTimestamp = timestamp;
window.requestAnimationFrame(animate);
}
Promise.all(
[textureShader.load(), phongShader.load()]
).then(x =>
window.requestAnimationFrame(animate)
);
</script>
</div>
</body>
</html>
Hey there, I've been trying for a while now to add a second texture to my cube and do some bump mapping. But I am a programming beginner, so it's kinda hard for me. All my maths for the matrices and vectors are in the correspondingly named .js files. I also have two kinds of shaders, the texture shader and the phong shader. Now everyone says I have to calculate my normals, but how do I do that? And where?
Looking forward to your help!
With a normal map, as in the question, bump mapping can be performed. In bump mapping, the normal vector of a fragment is read from a normal map and used for the light calculations.
In general, the incident light vector is transformed into texture space. This space represents the "orientation" of the normal map on the object (fragment). To set up a 3x3 orientation matrix that describes the orientation of the map, the tangent vector and the bi-tangent vector as well as the normal vector must be known. If there is no tangent vector and no bi-tangent vector, the vectors can be approximated from the partial derivatives of the vertex position and the texture coordinates in the fragment shader.
So at least the texture coordinate and the normal vector attributes are required. In the fragment shader the calculations are done in world space and texture space, respectively. The vertex shader is straightforward:
precision highp float;
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec2 a_texCoord;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform mat4 P;
uniform mat4 V;
uniform mat4 M;
void main()
{
o_uv = a_texCoord;
w_nv = normalize(mat3(M) * a_normal);
vec4 worldPos = M * vec4(a_position, 1.0);
w_pos = worldPos.xyz;
gl_Position = P * V * worldPos;
}
In the fragment shader, the normal vector is read from the normal map:
vec3 mapN = normalize(texture2D(u_normal_map, o_uv.st).xyz * 2.0 - 1.0);
The light vector is transformed to texture space:
vec3 L = tbn_inv * normalize(u_light_pos - w_pos);
With this vector the light calculations can be performed:
float kd = max(0.0, dot(mapN, L));
To calculate the matrix which transforms from world space to texture space, the partial derivative functions (dFdx, dFdy) are required. This means that the "OES_standard_derivatives" extension has to be enabled (or a "webgl2" context used):
gl = canvas.getContext( "experimental-webgl" );
var standard_derivatives = gl.getExtension("OES_standard_derivatives");
The algorithm to calculate the tangent vector and binormal vector is explained in another answer - How to calculate Tangent and Binormal?.
Final fragment shader:
#extension GL_OES_standard_derivatives : enable
precision mediump float;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform vec3 u_light_pos;
uniform sampler2D u_diffuse;
uniform sampler2D u_normal_map;
void main()
{
vec3 N = normalize(w_nv);
vec3 dp1 = dFdx( w_pos );
vec3 dp2 = dFdy( w_pos );
vec2 duv1 = dFdx( o_uv );
vec2 duv2 = dFdy( o_uv );
vec3 dp2perp = cross(dp2, N);
vec3 dp1perp = cross(N, dp1);
vec3 T = dp2perp * duv1.x + dp1perp * duv2.x;
vec3 B = dp2perp * duv1.y + dp1perp * duv2.y;
float invmax = inversesqrt(max(dot(T, T), dot(B, B)));
mat3 tm = mat3(T * invmax, B * invmax, N);
mat3 tbn_inv = mat3(vec3(tm[0].x, tm[1].x, tm[2].x), vec3(tm[0].y, tm[1].y, tm[2].y), vec3(tm[0].z, tm[1].z, tm[2].z));
vec3 L = tbn_inv * normalize(u_light_pos - w_pos);
vec3 mapN = normalize(texture2D(u_normal_map, o_uv.st).xyz * 2.0 - 1.0);
float kd = max(0.0, dot(mapN, L));
vec3 color = texture2D(u_diffuse, o_uv.st).rgb;
vec3 light_col = (0.0 + kd) * color.rgb;
gl_FragColor = vec4(clamp(light_col, 0.0, 1.0), 1.0);
}
And it will produce bump mapping like this:
If the tangent vector is known, the calculation of the tbn_inv matrix can be simplified considerably:
mat3 tm = mat3(normalize(w_tv), normalize(cross(w_nv, w_tv)), normalize(w_nv));
mat3 tbn_inv = mat3(vec3(tm[0].x, tm[1].x, tm[2].x), vec3(tm[0].y, tm[1].y, tm[2].y), vec3(tm[0].z, tm[1].z, tm[2].z));
If you want parallax mapping, as in this example, then a displacement map is required, too.
The white areas on this map are pushed "into" the object. The algorithm is described in detail at LearnOpengl - Parallax Mapping.
The idea is that each fragment is associated with a height from the displacement map. This can be imagined as a rectangular pillar standing on the fragment. The view ray is traced until it hits a displaced fragment.
For a high-performance algorithm, only a limited number of samples are taken from the displacement texture. Once a fragment is identified, the corresponding texels of the normal map and the diffuse texture are read. This gives the geometry a three-dimensional look. Note that this algorithm is not able to handle silhouettes.
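To make the refinement step at the end of ParallaxMapping() below explicit (a worked note added here, not part of the original answer): after the loop stops at the first sample where currentLayerDepth >= currentDepthMapValue, the hit position is estimated by linearly interpolating between that sample and the previous one:
afterDepth  = currentDepthMapValue - currentLayerDepth
beforeDepth = texture2D(u_displacement_map, prevTexCoords).r - (currentLayerDepth - layerDepth)
weight      = afterDepth / (afterDepth - beforeDepth)
uv          = prevTexCoords * weight + currentTexCoords * (1.0 - weight)
This matches the last lines of ParallaxMapping() in the shader listing that follows.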
Final fragment shader with steep parallax mapping:
#extension GL_OES_standard_derivatives : enable
precision mediump float;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform float u_height_scale;
uniform vec3 u_light_pos;
uniform vec3 u_view_pos;
uniform sampler2D u_diffuse;
uniform sampler2D u_normal_map;
uniform sampler2D u_displacement_map;
vec2 ParallaxMapping (vec2 texCoord, vec3 viewDir)
{
float numLayers = 32.0 - 31.0 * abs(dot(vec3(0.0, 0.0, 1.0), viewDir));
float layerDepth = 1.0 / numLayers;
vec2 P = viewDir.xy / viewDir.z * u_height_scale;
vec2 deltaTexCoords = P / numLayers;
vec2 currentTexCoords = texCoord;
float currentLayerDepth = 0.0;
float currentDepthMapValue = texture2D(u_displacement_map, currentTexCoords).r;
for (int i=0; i<32; ++ i)
{
if (currentLayerDepth >= currentDepthMapValue)
break;
currentTexCoords -= deltaTexCoords;
currentDepthMapValue = texture2D(u_displacement_map, currentTexCoords).r;
currentLayerDepth += layerDepth;
}
vec2 prevTexCoords = currentTexCoords + deltaTexCoords;
float afterDepth = currentDepthMapValue - currentLayerDepth;
float beforeDepth = texture2D(u_displacement_map, prevTexCoords).r - currentLayerDepth + layerDepth;
float weight = afterDepth / (afterDepth - beforeDepth);
return prevTexCoords * weight + currentTexCoords * (1.0 - weight);
}
void main()
{
vec3 N = normalize(w_nv);
vec3 dp1 = dFdx( w_pos );
vec3 dp2 = dFdy( w_pos );
vec2 duv1 = dFdx( o_uv );
vec2 duv2 = dFdy( o_uv );
vec3 dp2perp = cross(dp2, N);
vec3 dp1perp = cross(N, dp1);
vec3 T = dp2perp * duv1.x + dp1perp * duv2.x;
vec3 B = dp2perp * duv1.y + dp1perp * duv2.y;
float invmax = inversesqrt(max(dot(T, T), dot(B, B)));
mat3 tm = mat3(T * invmax, B * invmax, N);
mat3 tbn_inv = mat3(vec3(tm[0].x, tm[1].x, tm[2].x), vec3(tm[0].y, tm[1].y, tm[2].y), vec3(tm[0].z, tm[1].z, tm[2].z));
vec3 view_dir = tbn_inv * normalize(w_pos - u_view_pos);
vec2 uv = ParallaxMapping(o_uv, view_dir);
if (uv.x > 1.0 || uv.y > 1.0 || uv.x < 0.0 || uv.y < 0.0)
discard;
vec3 L = tbn_inv * normalize(u_light_pos - w_pos);
vec3 mapN = normalize(texture2D(u_normal_map, uv.st).xyz * 2.0 - 1.0);
float kd = max(0.0, dot(mapN, L));
vec3 color = texture2D(u_diffuse, uv.st).rgb;
vec3 light_col = (0.1 + kd) * color.rgb;
gl_FragColor = vec4(clamp(light_col, 0.0, 1.0), 1.0);
}
The result is much more impressive:
(function loadscene() {
var gl, progDraw, vp_size;
var bufCube = {};
var diffuse_tex = 1;
var height_tex = 2;
var normal_tex = 3;
function render(deltaMS){
var height_scale = 0.3 * document.getElementById("height").value / 100.0;
// setup view projection and model
vp_size = [canvas.width, canvas.height];
camera.Update( vp_size );
var prjMat = camera.Perspective();
var viewMat = camera.LookAt();
var modelMat = camera.AutoModelMatrix();
gl.viewport( 0, 0, vp_size[0], vp_size[1] );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
gl.frontFace(gl.CCW)
gl.cullFace(gl.BACK)
gl.enable(gl.CULL_FACE)
// set up draw shader
ShProg.Use( progDraw );
ShProg.SetF3( progDraw, "u_view_pos", camera.pos )
ShProg.SetF3( progDraw, "u_light_pos", [0.0, 5.0, 5.0] )
ShProg.SetF1( progDraw, "u_height_scale", height_scale );
ShProg.SetI1( progDraw, "u_diffuse", diffuse_tex );
ShProg.SetI1( progDraw, "u_displacement_map", height_tex );
ShProg.SetI1( progDraw, "u_normal_map", normal_tex );
ShProg.SetM44( progDraw, "P", prjMat );
ShProg.SetM44( progDraw, "V", viewMat );
ShProg.SetM44( progDraw, "M", modelMat );
// draw scene
VertexBuffer.Draw( bufCube );
requestAnimationFrame(render);
}
function initScene() {
canvas = document.getElementById( "canvas");
gl = canvas.getContext( "experimental-webgl" );
var standard_derivatives = gl.getExtension("OES_standard_derivatives"); // dFdx, dFdy
if (!standard_derivatives)
alert('no standard derivatives support (no dFdx, dFdy)');
//gl = canvas.getContext( "webgl2" );
if ( !gl )
return null;
progDraw = ShProg.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
if ( !progDraw.progObj )
return null;
progDraw.inPos = ShProg.AttrI( progDraw, "a_position" );
progDraw.inNV = ShProg.AttrI( progDraw, "a_normal" );
progDraw.inUV = ShProg.AttrI( progDraw, "a_texCoord" );
// create cube
let Pos = [ -1,-1,1, 1,-1,1, 1,1,1, -1,1,1, -1,-1,-1, 1,-1,-1, 1,1,-1, -1,1,-1 ];
let Col = [ 1,0,0, 1,0.5,0, 1,0,1, 1,1,0, 0,1,0, 0, 0, 1 ];
let NV = [ 0,0,1, 1,0,0, 0,0,-1, -1,0,0, 0,1,0, 0,-1,0 ];
let TV = [ 1,0,0, 0,0,-1, -1,0,0, 0,0,1, 1,0,0, -1,0,0 ];
var cubeHlpInx = [ 0,1,2,3, 1,5,6,2, 5,4,7,6, 4,0,3,7, 3,2,6,7, 1,0,4,5 ];
var cubePosData = [];
for ( var i = 0; i < cubeHlpInx.length; ++ i ) cubePosData.push(Pos[cubeHlpInx[i]*3], Pos[cubeHlpInx[i]*3+1], Pos[cubeHlpInx[i]*3+2] );
var cubeNVData = [];
for ( var i1 = 0; i1 < 6; ++ i1 ) {
for ( i2 = 0; i2 < 4; ++ i2 ) cubeNVData.push(NV[i1*3], NV[i1*3+1], NV[i1*3+2]);
}
var cubeTVData = [];
for ( var i1 = 0; i1 < 6; ++ i1 ) {
for ( i2 = 0; i2 < 4; ++ i2 ) cubeTVData.push(TV[i1*3], TV[i1*3+1], TV[i1*3+2]);
}
var cubeColData = [];
for ( var is = 0; is < 6; ++ is ) {
for ( var ip = 0; ip < 4; ++ ip ) cubeColData.push(Col[is*3], Col[is*3+1], Col[is*3+2]);
}
var cubeTexData = []
for ( var i = 0; i < 6; ++ i ) cubeTexData.push( 0, 0, 1, 0, 1, 1, 0, 1 );
var cubeInxData = [];
for ( var i = 0; i < cubeHlpInx.length; i += 4 ) cubeInxData.push( i, i+1, i+2, i, i+2, i+3 );
bufCube = VertexBuffer.Create(
[ { data : cubePosData, attrSize : 3, attrLoc : progDraw.inPos },
{ data : cubeNVData, attrSize : 3, attrLoc : progDraw.inNV },
//{ data : cubeTVData, attrSize : 3, attrLoc : progDraw.inTV },
{ data : cubeTexData, attrSize : 2, attrLoc : progDraw.inUV },
//{ data : cubeColData, attrSize : 3, attrLoc : progDraw.inCol },
],
cubeInxData, gl.TRIANGLES );
Texture.LoadTexture2D( diffuse_tex, "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/woodtiles.jpg" );
Texture.LoadTexture2D( height_tex, "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/toy_box_disp.png" );
Texture.LoadTexture2D( normal_tex, "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/toy_box_normal.png" );
camera = new Camera( [0, 3, 0], [0, 0, 0], [0, 0, 1], 90, vp_size, 0.5, 100 );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight];
//vp_size = [256, 256];
canvas.width = vp_size[0];
canvas.height = vp_size[1];
}
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( deltaTime, interval ) {
return Fract( deltaTime / (1000*interval) ) * 2.0 * Math.PI;
}
function CalcMove( deltaTime, interval, range ) {
var pos = self.Fract( deltaTime / (1000*interval) ) * 2.0
var pos = pos < 1.0 ? pos : (2.0-pos)
return range[0] + (range[1] - range[0]) * pos;
}
function IdentM44() {
return [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ];
};
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = matA.slice(0);
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Rotate(matA, angRad, axis) {
var s = Math.sin(angRad), c = Math.cos(angRad);
var x = axis[0], y = axis[1], z = axis[2];
matB = [
x*x*(1-c)+c, x*y*(1-c)-z*s, x*z*(1-c)+y*s, 0,
y*x*(1-c)+z*s, y*y*(1-c)+c, y*z*(1-c)-x*s, 0,
z*x*(1-c)-y*s, z*y*(1-c)+x*s, z*z*(1-c)+c, 0,
0, 0, 0, 1 ];
return Multiply(matA, matB);
}
function Multiply(matA, matB) {
matC = IdentM44();
for (var i0=0; i0<4; ++i0 )
for (var i1=0; i1<4; ++i1 )
matC[i0*4+i1] = matB[i0*4+0] * matA[0*4+i1] + matB[i0*4+1] * matA[1*4+i1] + matB[i0*4+2] * matA[2*4+i1] + matB[i0*4+3] * matA[3*4+i1]
return matC;
}
function Cross( a, b ) { return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0], 0.0 ]; }
function Dot( a, b ) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
function Normalize( v ) {
var len = Math.sqrt( v[0] * v[0] + v[1] * v[1] + v[2] * v[2] );
return [ v[0] / len, v[1] / len, v[2] / len ];
}
Camera = function( pos, target, up, fov_y, vp, near, far ) {
this.Time = function() { return Date.now(); }
this.pos = pos;
this.target = target;
this.up = up;
this.fov_y = fov_y;
this.vp = vp;
this.near = near;
this.far = far;
this.orbit_mat = this.current_orbit_mat = this.model_mat = this.current_model_mat = IdentM44();
this.mouse_drag = this.auto_spin = false;
this.auto_rotate = true;
this.mouse_start = [0, 0];
this.mouse_drag_axis = [0, 0, 0];
this.mouse_drag_angle = 0;
this.mouse_drag_time = 0;
this.drag_start_T = this.rotate_start_T = this.Time();
this.Ortho = function() {
var fn = this.far + this.near;
var f_n = this.far - this.near;
var w = this.vp[0];
var h = this.vp[1];
return [
2/w, 0, 0, 0,
0, 2/h, 0, 0,
0, 0, -2/f_n, 0,
0, 0, -fn/f_n, 1 ];
};
this.Perspective = function() {
var n = this.near;
var f = this.far;
var fn = f + n;
var f_n = f - n;
var r = this.vp[0] / this.vp[1];
var t = 1 / Math.tan( Math.PI * this.fov_y / 360 );
return [
t/r, 0, 0, 0,
0, t, 0, 0,
0, 0, -fn/f_n, -1,
0, 0, -2*f*n/f_n, 0 ];
};
this.LookAt = function() {
var mz = Normalize( [ this.pos[0]-this.target[0], this.pos[1]-this.target[1], this.pos[2]-this.target[2] ] );
var mx = Normalize( Cross( this.up, mz ) );
var my = Normalize( Cross( mz, mx ) );
var tx = Dot( mx, this.pos );
var ty = Dot( my, this.pos );
var tz = Dot( [-mz[0], -mz[1], -mz[2]], this.pos );
return [mx[0], my[0], mz[0], 0, mx[1], my[1], mz[1], 0, mx[2], my[2], mz[2], 0, tx, ty, tz, 1];
};
this.AutoModelMatrix = function() {
return this.auto_rotate ? Multiply(this.current_model_mat, this.model_mat) : this.model_mat;
};
this.Update = function(vp_size) {
if (vp_size)
this.vp = vp_size;
var current_T = this.Time();
this.current_model_mat = IdentM44()
var auto_angle_x = Fract( (current_T - this.rotate_start_T) / 13000.0 ) * 2.0 * Math.PI;
var auto_angle_y = Fract( (current_T - this.rotate_start_T) / 17000.0 ) * 2.0 * Math.PI;
this.current_model_mat = RotateAxis( this.current_model_mat, auto_angle_x, 0 );
this.current_model_mat = RotateAxis( this.current_model_mat, auto_angle_y, 1 );
};
}
var Texture = {};
Texture.HandleLoadedTexture2D = function( texture, flipY ) {
gl.activeTexture( gl.TEXTURE0 + texture.unit );
gl.bindTexture( gl.TEXTURE_2D, texture.obj );
gl.pixelStorei( gl.UNPACK_FLIP_Y_WEBGL, flipY != undefined && flipY == true );
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.image );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT );
return texture;
}
Texture.LoadTexture2D = function( unit, name ) {
var texture = {};
texture.obj = gl.createTexture();
texture.unit = unit;
texture.image = new Image();
texture.image.setAttribute('crossorigin', 'anonymous');
texture.image.onload = function () {
Texture.HandleLoadedTexture2D( texture, false )
}
texture.image.src = name;
return texture;
}
var ShProg = {
Create: function (shaderList) {
var shaderObjs = [];
for (var i_sh = 0; i_sh < shaderList.length; ++i_sh) {
var shderObj = this.Compile(shaderList[i_sh].source, shaderList[i_sh].stage);
if (shderObj) shaderObjs.push(shderObj);
}
var prog = {}
prog.progObj = this.Link(shaderObjs)
if (prog.progObj) {
prog.attrInx = {};
var noOfAttributes = gl.getProgramParameter(prog.progObj, gl.ACTIVE_ATTRIBUTES);
for (var i_n = 0; i_n < noOfAttributes; ++i_n) {
var name = gl.getActiveAttrib(prog.progObj, i_n).name;
prog.attrInx[name] = gl.getAttribLocation(prog.progObj, name);
}
prog.uniLoc = {};
var noOfUniforms = gl.getProgramParameter(prog.progObj, gl.ACTIVE_UNIFORMS);
for (var i_n = 0; i_n < noOfUniforms; ++i_n) {
var name = gl.getActiveUniform(prog.progObj, i_n).name;
prog.uniLoc[name] = gl.getUniformLocation(prog.progObj, name);
}
}
return prog;
},
AttrI: function (prog, name) { return prog.attrInx[name]; },
UniformL: function (prog, name) { return prog.uniLoc[name]; },
Use: function (prog) { gl.useProgram(prog.progObj); },
SetI1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1i(prog.uniLoc[name], val); },
SetF1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1f(prog.uniLoc[name], val); },
SetF2: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform2fv(prog.uniLoc[name], arr); },
SetF3: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform3fv(prog.uniLoc[name], arr); },
SetF4: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform4fv(prog.uniLoc[name], arr); },
SetM33: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix3fv(prog.uniLoc[name], false, mat); },
SetM44: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix4fv(prog.uniLoc[name], false, mat); },
Compile: function (source, shaderStage) {
var shaderScript = document.getElementById(source);
if (shaderScript)
source = shaderScript.text;
var shaderObj = gl.createShader(shaderStage);
gl.shaderSource(shaderObj, source);
gl.compileShader(shaderObj);
var status = gl.getShaderParameter(shaderObj, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : null;
},
Link: function (shaderObjs) {
var prog = gl.createProgram();
for (var i_sh = 0; i_sh < shaderObjs.length; ++i_sh)
gl.attachShader(prog, shaderObjs[i_sh]);
gl.linkProgram(prog);
status = gl.getProgramParameter(prog, gl.LINK_STATUS);
if ( !status ) alert(gl.getProgramInfoLog(prog));
return status ? prog : null;
} };
var VertexBuffer = {
Create: function(attribs, indices, type) {
var buffer = { buf: [], attr: [], inx: gl.createBuffer(), inxLen: indices.length, primitive_type: type ? type : gl.TRIANGLES };
for (var i=0; i<attribs.length; ++i) {
buffer.buf.push(gl.createBuffer());
buffer.attr.push({ size : attribs[i].attrSize, loc : attribs[i].attrLoc });
gl.bindBuffer(gl.ARRAY_BUFFER, buffer.buf[i]);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( attribs[i].data ), gl.STATIC_DRAW);
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer.inx);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
return buffer;
},
Draw: function(bufObj) {
for (var i=0; i<bufObj.buf.length; ++i) {
gl.bindBuffer(gl.ARRAY_BUFFER, bufObj.buf[i]);
gl.vertexAttribPointer(bufObj.attr[i].loc, bufObj.attr[i].size, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray( bufObj.attr[i].loc);
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, bufObj.inx);
gl.drawElements(bufObj.primitive_type, bufObj.inxLen, gl.UNSIGNED_SHORT, 0);
for (var i=0; i<bufObj.buf.length; ++i)
gl.disableVertexAttribArray(bufObj.attr[i].loc);
gl.bindBuffer( gl.ARRAY_BUFFER, null );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
} };
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision highp float;
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec2 a_texCoord;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform mat4 P;
uniform mat4 V;
uniform mat4 M;
void main()
{
o_uv = a_texCoord;
w_nv = normalize(mat3(M) * a_normal);
vec4 worldPos = M * vec4(a_position, 1.0);
w_pos = worldPos.xyz;
gl_Position = P * V * worldPos;
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
#extension GL_OES_standard_derivatives : enable
precision mediump float;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform float u_height_scale;
uniform vec3 u_light_pos;
uniform vec3 u_view_pos;
uniform sampler2D u_diffuse;
uniform sampler2D u_normal_map;
uniform sampler2D u_displacement_map;
vec2 ParallaxMapping (vec2 texCoord, vec3 viewDir)
{
float numLayers = 32.0 - 31.0 * abs(dot(vec3(0.0, 0.0, 1.0), viewDir));
float layerDepth = 1.0 / numLayers;
vec2 P = viewDir.xy / viewDir.z * u_height_scale;
vec2 deltaTexCoords = P / numLayers;
vec2 currentTexCoords = texCoord;
float currentLayerDepth = 0.0;
float currentDepthMapValue = texture2D(u_displacement_map, currentTexCoords).r;
for (int i=0; i<32; ++ i)
{
if (currentLayerDepth >= currentDepthMapValue)
break;
currentTexCoords -= deltaTexCoords;
currentDepthMapValue = texture2D(u_displacement_map, currentTexCoords).r;
currentLayerDepth += layerDepth;
}
vec2 prevTexCoords = currentTexCoords + deltaTexCoords;
float afterDepth = currentDepthMapValue - currentLayerDepth;
float beforeDepth = texture2D(u_displacement_map, prevTexCoords).r - currentLayerDepth + layerDepth;
float weight = afterDepth / (afterDepth - beforeDepth);
return prevTexCoords * weight + currentTexCoords * (1.0 - weight);
}
void main()
{
vec3 N = normalize(w_nv);
vec3 dp1 = dFdx( w_pos );
vec3 dp2 = dFdy( w_pos );
vec2 duv1 = dFdx( o_uv );
vec2 duv2 = dFdy( o_uv );
vec3 dp2perp = cross(dp2, N);
vec3 dp1perp = cross(N, dp1);
vec3 T = dp2perp * duv1.x + dp1perp * duv2.x;
vec3 B = dp2perp * duv1.y + dp1perp * duv2.y;
float invmax = inversesqrt(max(dot(T, T), dot(B, B)));
mat3 tm = mat3(T * invmax, B * invmax, N);
mat3 tbn_inv = mat3(vec3(tm[0].x, tm[1].x, tm[2].x), vec3(tm[0].y, tm[1].y, tm[2].y), vec3(tm[0].z, tm[1].z, tm[2].z));
vec3 view_dir = tbn_inv * normalize(w_pos - u_view_pos);
vec2 uv = ParallaxMapping(o_uv, view_dir);
if (uv.x > 1.0 || uv.y > 1.0 || uv.x < 0.0 || uv.y < 0.0)
discard;
vec3 L = tbn_inv * normalize(u_light_pos - w_pos);
vec3 mapN = normalize(texture2D(u_normal_map, uv.st).xyz * 2.0 - 1.0);
float kd = max(0.0, dot(mapN, L));
vec3 color = texture2D(u_diffuse, uv.st).rgb;
vec3 light_col = (0.1 + kd) * color.rgb;
gl_FragColor = vec4(clamp(light_col, 0.0, 1.0), 1.0);
}
</script>
<body>
<div>
<form id="gui" name="inputs">
<table>
<tr>
<td> <font color=#CCF>height scale</font> </td>
<td> <input type="range" id="height" min="0" max="100" value="50"/></td>
</tr>
</table>
</form>
</div>
<canvas id="canvas" style="border: none;" width="100%" height="100%"></canvas>
I'm rendering circles using regl, and have three goals:
The canvas should be transparent, showing HTML content behind it.
Circles should be antialiased smoothly.
Overlapping circles should look reasonable (blend colors, no corners showing)
So far, I have this: Glitch code and demo.
UPDATE: The demo links now reflect the working, accepted answer. Code below is unchanged.
index.js
const regl = require('regl');
const glsl = require('glslify');
const vertexShader = glsl.file('../shaders/vertex.glsl');
const fragmentShader = glsl.file('../shaders/fragment.glsl');
// Create webgl context and clear.
const canvasEl = document.querySelector('canvas');
const app = regl({
canvas: canvasEl,
extensions: ['OES_standard_derivatives']
});
app.clear({color: [0, 0, 0, 0], depth: 1});
// Generate random points and colors.
const attributes = {position: [], color: []};
for (let i = 0; i < 100; i++) {
attributes.position.push(Math.random() * 2 - 1, Math.random() * 2 - 1);
attributes.color.push(Math.random(), Math.random(), Math.random());
}
// Define draw instructions.
const draw = app({
vert: vertexShader,
frag: fragmentShader,
attributes: attributes,
count: 100,
primitive: 'points',
depth: {enable: true},
blend: {
enable: true
}
});
// Draw the points.
draw();
vertex.glsl
// vertex.glsl
precision mediump float;
attribute vec2 position;
attribute vec3 color;
varying vec3 vColor;
void main() {
vColor = color;
gl_Position = vec4(position, 0, 1);
gl_PointSize = 40.;
}
fragment.glsl
// fragment.glsl
#ifdef GL_OES_standard_derivatives
#extension GL_OES_standard_derivatives : enable
#endif
precision mediump float;
varying vec3 vColor;
void main() {
float r = 0.0, delta = 0.0, alpha = 1.0;
vec2 cxy = 2.0 * gl_PointCoord - 1.0;
r = dot(cxy, cxy);
#ifdef GL_OES_standard_derivatives
delta = fwidth(r);
alpha = 1.0 - smoothstep(1.0 - delta, 1.0 + delta, r);
#endif
gl_FragColor = vec4(vColor, alpha);
}
However, the result looks not-so-great. Corners are visible, and circles aren't blending properly.
I've also tried adding the following blend terms:
func: {
srcRGB: 'src alpha',
srcAlpha: 'one minus src alpha',
dstRGB: 'one minus src alpha',
dstAlpha: 'src alpha'
}
This looks a bit better, but the corners are still there and something is wrong when the background is white.
Could you suggest improvements to this? (And maybe point me to better information about blending, if that's what I'm missing here) Thanks!
You should set up your blending parameters like this:
func: {
srcRGB: 'src alpha',
srcAlpha: 'src alpha',
dstRGB: 'one minus src alpha',
dstAlpha: 'one minus src alpha'
}
This means that the destination and source colors are blended like this:
Red, green and blue (srcRGB: 'src alpha', dstRGB: 'one minus src alpha'):
R_dest = R_dest * (1 - Alpha_src) + R_src * Alpha_src
G_dest = G_dest * (1 - Alpha_src) + G_src * Alpha_src
B_dest = B_dest * (1 - Alpha_src) + B_src * Alpha_src
The alpha channel (srcAlpha: 'src alpha', dstAlpha: 'one minus src alpha'):
Alpha_dest = Alpha_dest * (1 - Alpha_src) + Alpha_src * Alpha_src
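A quick worked case (added here for illustration): blending a half-transparent red source (R,G,B,Alpha)_src = (1, 0, 0, 0.5) over an opaque white destination (1, 1, 1, 1) gives
R_dest = 1 * (1 - 0.5) + 1 * 0.5 = 1.0
G_dest = 1 * (1 - 0.5) + 0 * 0.5 = 0.5
B_dest = 1 * (1 - 0.5) + 0 * 0.5 = 0.5
which is the light red you would expect from 50% coverage over white.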
See also glBlendFunc and glBlendFuncSeparate.
Furthermore, you have to make sure that the depth test is disabled.
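Applied to the regl draw command from the question, this amounts to roughly the following (a sketch; everything except the depth and blend settings stays as posted):
const draw = app({
  vert: vertexShader,
  frag: fragmentShader,
  attributes: attributes,
  count: 100,
  primitive: 'points',
  // disable the depth test so overlapping circles are not rejected
  depth: {enable: false},
  blend: {
    enable: true,
    func: {
      srcRGB: 'src alpha',
      srcAlpha: 'src alpha',
      dstRGB: 'one minus src alpha',
      dstAlpha: 'one minus src alpha'
    }
  }
});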
See the WebGL example below (tested with Firefox, Chrome, Edge, Opera):
<script type="text/javascript">
back_vert =
"precision mediump float; \n" +
"attribute vec2 inPos; \n" +
"varying vec2 pos; \n" +
"uniform mat4 u_projectionMat44;" +
"uniform mat4 u_modelViewMat44;" +
"void main()" +
"{" +
" pos = inPos.xy;" +
" vec4 viewPos = u_modelViewMat44 * vec4( inPos.xy, 0.0, 1.0 );" +
" gl_Position = u_projectionMat44 * viewPos;" +
"}";
back_frag =
"precision mediump float; \n" +
"varying vec2 pos; \n" +
"void main() \n" +
"{ \n" +
" vec2 coord = pos * 0.5 + 0.5; \n" +
" float gray = smoothstep( 0.3, 0.7, (coord.x + coord.y) * 0.5 ); \n" +
" gl_FragColor = vec4( vec3( gray ), 1.0 ); \n" +
"}";
draw_vert =
"precision mediump float; \n" +
"attribute vec2 inPos; \n" +
"varying vec2 pos; \n" +
"uniform mat4 u_projectionMat44;" +
"uniform mat4 u_modelViewMat44;" +
"void main()" +
"{" +
" pos = inPos.xy;" +
" vec4 viewPos = u_modelViewMat44 * vec4( inPos.xy, 0.0, 1.0 );" +
" gl_Position = u_projectionMat44 * viewPos;" +
"}";
draw_frag =
"precision mediump float; \n" +
"varying vec2 pos; \n" +
"uniform vec4 u_color;" +
"uniform vec2 u_vp;" +
"void main()" +
"{" +
" float r = length( pos );" +
" float d = 4.0 * length( 1.0 / u_vp ); \n" +
" float a = 1.0 - smoothstep( 1.0 - d, 1.0 + d, r ); \n" +
" gl_FragColor = vec4( u_color.rgb, u_color.a * a );" +
"}";
glArrayType = typeof Float32Array !="undefined" ? Float32Array : ( typeof WebGLFloatArray != "undefined" ? WebGLFloatArray : Array );
function IdentityMat44() {
var m = new glArrayType(16);
m[0] = 1; m[1] = 0; m[2] = 0; m[3] = 0;
m[4] = 0; m[5] = 1; m[6] = 0; m[7] = 0;
m[8] = 0; m[9] = 0; m[10] = 1; m[11] = 0;
m[12] = 0; m[13] = 0; m[14] = 0; m[15] = 1;
return m;
};
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = new glArrayType(16);
for ( var i = 0; i < 16; ++ i ) matB[i] = matA[i];
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Translate( matA, trans ) {
var matB = new glArrayType(16);
for ( var i = 0; i < 16; ++ i ) matB[i] = matA[i];
for ( var i = 0; i < 3; ++ i )
matB[12+i] = matA[i] * trans[0] + matA[4+i] * trans[1] + matA[8+i] * trans[2] + matA[12+i];
return matB;
}
function Scale( matA, scale ) {
var matB = new glArrayType(16);
for ( var i = 0; i < 16; ++ i ) matB[i] = matA[i];
for ( var a = 0; a < 4; ++ a )
for ( var i = 0; i < 3; ++ i )
matB[a*4+i] = matA[a*4+i] * scale[0];
return matB;
}
Ortho = function( l, r, t, b, n, f ) {
var fn = f + n;
var f_n = f - n;
var m = IdentityMat44();
m[0] = 2/(r-l); m[1] = 0; m[2] = 0; m[3] = 0;
m[4] = 0; m[5] = 2/(t-b); m[6] = 0; m[7] = 0;
m[8] = 0; m[9] = 0; m[10] = -2 / f_n; m[11] = -fn / f_n;
m[12] = 0; m[13] = 0; m[14] = 0; m[15] = 1;
return m;
}
vec4_add = function( a, b ) { return [ a[0]+b[0], a[1]+b[1], a[2]+b[2], a[3]+b[3] ]; }
vec4_sub = function( a, b ) { return [ a[0]-b[0], a[1]-b[1], a[2]-b[2], a[3]-b[3] ]; }
vec4_mul = function( a, b ) { return [ a[0]*b[0], a[1]*b[1], a[2]*b[2], a[3]*b[3] ]; }
vec4_scale = function( a, s ) { return [ a[0]*s, a[1]*s, a[2]*s, a[3]*s ]; }
// shader program object
var ShaderProgram = {};
ShaderProgram.Create = function( shaderList, uniformNames ) {
var shaderObjs = [];
for ( var i_sh = 0; i_sh < shaderList.length; ++ i_sh ) {
var shderObj = this.CompileShader( shaderList[i_sh].source, shaderList[i_sh].stage );
if ( shderObj == 0 )
return 0;
shaderObjs.push( shderObj );
}
var progObj = this.LinkProgram( shaderObjs )
if ( progObj != 0 ) {
progObj.unifomLocation = {};
for ( var i_n = 0; i_n < uniformNames.length; ++ i_n ) {
var name = uniformNames[i_n];
progObj.unifomLocation[name] = gl.getUniformLocation( progObj, name );
}
}
return progObj;
}
ShaderProgram.Use = function( progObj ) { gl.useProgram( progObj ); }
ShaderProgram.SetUniformInt = function( progObj, name, val ) { gl.uniform1i( progObj.unifomLocation[name], val ); }
ShaderProgram.SetUniform2f = function( progObj, name, arr ) { gl.uniform2fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniform3f = function( progObj, name, arr ) { gl.uniform3fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniform4f = function( progObj, name, arr ) { gl.uniform4fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniformMat44 = function( progObj, name, mat ) { gl.uniformMatrix4fv( progObj.unifomLocation[name], false, mat ); }
ShaderProgram.CompileShader = function( source, shaderStage ) {
var shaderObj = gl.createShader( shaderStage );
gl.shaderSource( shaderObj, source );
gl.compileShader( shaderObj );
var status = gl.getShaderParameter( shaderObj, gl.COMPILE_STATUS );
if ( !status ) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : 0;
}
ShaderProgram.LinkProgram = function( shaderObjs ) {
var prog = gl.createProgram();
for ( var i_sh = 0; i_sh < shaderObjs.length; ++ i_sh )
gl.attachShader( prog, shaderObjs[i_sh] );
gl.linkProgram( prog );
status = gl.getProgramParameter( prog, gl.LINK_STATUS );
if ( !status ) alert("Could not initialise shaders");
gl.useProgram( null );
return status ? prog : 0;
}
function drawScene(){
var canvas = document.getElementById( "camera-canvas" );
var vp = [canvas.width, canvas.height];
var currentTime = Date.now();
var deltaMS = currentTime - startTime;
var aspect = canvas.width / canvas.height;
var matOrtho = Ortho( -aspect, aspect, 1, -1, -1, 1 );
var alpha = document.getElementById( "alpha" ).value / 100;
gl.viewport( 0, 0, canvas.width, canvas.height );
gl.disable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
gl.disable( gl.BLEND );
ShaderProgram.Use( progBack );
ShaderProgram.SetUniformMat44( progBack, "u_projectionMat44", matOrtho );
ShaderProgram.SetUniformMat44( progBack, "u_modelViewMat44", IdentityMat44() );
gl.enableVertexAttribArray( progBack.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.vertexAttribPointer( progBack.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.drawElements( gl.TRIANGLES, bufObj.inx.len, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBack.inPos );
gl.enable( gl.BLEND );
gl.blendFunc( gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA );
ShaderProgram.Use( progDraw );
gl.enableVertexAttribArray( progDraw.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.vertexAttribPointer( progDraw.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
ShaderProgram.SetUniformMat44( progDraw, "u_projectionMat44", matOrtho );
ShaderProgram.SetUniform2f( progDraw, "u_vp", vp );
var col = [ [1.0,0.0,0.0], [1.0,1.0,0.0], [0.0,0.0,1.0] ];
var time = [ 7.0, 11.0, 13.0 ];
for ( var i = 0; i < 3; ++ i ) {
var modelMat = Scale( IdentityMat44(), [ 0.3, 0.3, 0.3] );
var angRad = CalcAng( currentTime, time[i] ) + i * Math.PI * 2 / 3;
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
modelMat[12] = cosAng * 0.3 + i * 0.2;
modelMat[13] = sinAng * 0.3 + i * 0.2;
ShaderProgram.SetUniformMat44( progDraw, "u_modelViewMat44", modelMat );
var color = col[i];
color.push( alpha );
ShaderProgram.SetUniform4f( progDraw, "u_color", color );
gl.drawElements( gl.TRIANGLES, bufObj.inx.len, gl.UNSIGNED_SHORT, 0 );
}
gl.disableVertexAttribArray( progDraw.inPos );
}
var startTime;
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( currentTime, intervall ) {
return Fract( (currentTime - startTime) / (1000*intervall) ) * 2.0 * Math.PI;
}
function CalcMove( currentTime, intervall, range ) {
var pos = self.Fract( (currentTime - startTime) / (1000*intervall) ) * 2.0
var pos = pos < 1.0 ? pos : (2.0-pos)
return range[0] + (range[1] - range[0]) * pos;
}
var mousePos = [-1, -1];
var gl;
var prog;
var bufObj = {};
function cameraStart() {
var canvas = document.getElementById( "camera-canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return;
var vp = [canvas.width, canvas.height];
progBack = ShaderProgram.Create(
[ { source : back_vert, stage : gl.VERTEX_SHADER },
{ source : back_frag, stage : gl.FRAGMENT_SHADER }
],
[ "u_projectionMat44", "u_modelViewMat44"] );
progBack.inPos = gl.getAttribLocation( progBack, "inPos" );
if ( progBack == 0 )
return;
progDraw = ShaderProgram.Create(
[ { source : draw_vert, stage : gl.VERTEX_SHADER },
{ source : draw_frag, stage : gl.FRAGMENT_SHADER }
],
[ "u_projectionMat44", "u_modelViewMat44", "u_color", "u_alpha", "u_vp"] );
progDraw.inPos = gl.getAttribLocation( progDraw, "inPos" );
if ( progDraw == 0 )
return;
var pos = [ -1, -1, 1, -1, 1, 1, -1, 1 ];
var inx = [ 0, 1, 2, 0, 2, 3 ];
bufObj.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( pos ), gl.STATIC_DRAW );
bufObj.inx = gl.createBuffer();
bufObj.inx.len = inx.length;
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( inx ), gl.STATIC_DRAW );
startTime = Date.now();
setInterval(drawScene, 50);
}
</script>
<body onload="cameraStart();">
<div style="margin-left: 260px;">
<div style="float: right; width: 100%; background-color: #CCF;">
<form name="inputs">
<table>
<tr> <td> alpha </td>
<td> <input type="range" id="alpha" min="0" max="100" value="50"/></td> </tr>
</table>
</form>
</div>
<div style="float: right; width: 260px; margin-left: -260px;">
<canvas id="camera-canvas" style="border: none;" width="256" height="256"></canvas>
</div>
<div style="clear: both;"></div>
</div>
</body>
The canvas requires pre-multiplied alpha unless you specifically request otherwise, so problem #1 is that your blend function should be:
blend: {
enable: true,
func: {
srcRGB: 'one',
srcAlpha: 'one',
dstRGB: 'one minus src alpha',
dstAlpha: 'one minus src alpha',
},
},
You also need to return premultiplied values from your shader
gl_FragColor = vec4(vColor, alpha);
gl_FragColor.rgb *= gl_FragColor.a; // premultiply by alpha
}
The other problem is that you have the depth test on. The default depth function is LESS, which means new pixels are not drawn unless their Z value is LESS than the existing Z value. Since all of your circles are drawn at the same depth, no new circle will be drawn anywhere a previous circle has already been drawn.
The easiest fix is to turn off the depth test
depth: {enable: false},
Result:
As for why premultiplied alpha is used, see
https://developer.nvidia.com/content/alpha-blending-pre-or-not-pre
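For completeness (an addition, not from the original answer): "unless you specifically request otherwise" refers to the WebGL context attributes. If you would rather keep straight (non-premultiplied) alpha, request it when the context is created; regl forwards context attributes to getContext via its attributes option:
// plain WebGL (if you are not using regl)
const gl = canvasEl.getContext('webgl', { premultipliedAlpha: false });
// with regl
const app = regl({
  canvas: canvasEl,
  extensions: ['OES_standard_derivatives'],
  attributes: { premultipliedAlpha: false }
});
With that, the usual 'src alpha' / 'one minus src alpha' blend function matches how the browser composites the canvas.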