WebGL: Set opacity when using texture2D() - javascript

I have a simple WebGL scene created by Three.js for which I'm using a custom shader. The scene contains an image and I'd like to set the opacity of the image, but manipulating the .a attribute of gl_FragColor is not doing the trick.
Does anyone know how to set the opacity of an image in WebGL? I'd be grateful for any ideas others can offer on this question!
<html>
<head>
<style>
html, body { width: 100%; height: 100%; background: #000; }
body { margin: 0; overflow: hidden; }
canvas { width: 100%; height: 100%; }
</style>
</head>
<body>
<script src='https://cdnjs.cloudflare.com/ajax/libs/three.js/92/three.min.js'></script>
<script src='https://threejs.org/examples/js/controls/TrackballControls.js'></script>
<script type='x-shader/x-vertex' id='vertex-shader'>
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform vec3 cameraPosition;
attribute vec2 uv;
attribute vec3 position;
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script type='x-shader/x-fragment' id='fragment-shader'>
precision highp float; // set float precision (optional)
uniform sampler2D texture; // identify the texture as a uniform argument
varying vec2 vUv; // identify the uv values as a varying attribute
void main() {
vec4 color = texture2D(texture, vUv);
gl_FragColor = vec4(color[0], color[1], color[2], 0.0);
}
</script>
<script>
function getScene() {
var scene = new THREE.Scene();
scene.background = new THREE.Color(0xffffff);
return scene;
}
function getCamera() {
var aspectRatio = window.innerWidth / window.innerHeight;
var camera = new THREE.PerspectiveCamera(75, aspectRatio, 0.1, 1000);
camera.position.set(0, 1, 10);
return camera;
}
function getRenderer() {
// Create the canvas with a renderer
var renderer = new THREE.WebGLRenderer({antialias: true});
// Add support for retina displays
renderer.setPixelRatio(window.devicePixelRatio);
// Specify the size of the canvas
renderer.setSize(window.innerWidth, window.innerHeight);
// Add the canvas to the DOM
document.body.appendChild(renderer.domElement);
return renderer;
}
function getControls(camera, renderer) {
var controls = new THREE.TrackballControls(camera, renderer.domElement);
controls.zoomSpeed = 0.4;
controls.panSpeed = 0.4;
return controls;
}
function loadImage() {
var geometry = new THREE.BufferGeometry();
/*
Now we need to push some vertices into that geometry to identify the coordinates the geometry should cover
*/
// Identify the image size
var imageSize = {width: 10, height: 7.5};
// Identify the x, y, z coords where the image should be placed
var coords = {x: -5, y: -3.75, z: 0};
// Add one vertex for each corner of the image, using the
// following order: lower left, lower right, upper right, upper left
var vertices = new Float32Array([
coords.x, coords.y, coords.z, // bottom left
coords.x+imageSize.width, coords.y, coords.z, // bottom right
coords.x+imageSize.width, coords.y+imageSize.height, coords.z, // upper right
coords.x, coords.y+imageSize.height, coords.z, // upper left
])
// set the uvs for this box; these identify the following corners:
// lower-left, lower-right, upper-right, upper-left
var uvs = new Float32Array([
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
])
// indices = sequence of index positions in `vertices` to use as vertices
// we make two triangles but only use 4 distinct vertices in the object
// the second argument to THREE.BufferAttribute is the number of elements
// in the first argument per vertex
geometry.setIndex([0,1,2, 2,3,0])
geometry.addAttribute('position', new THREE.BufferAttribute( vertices, 3 ));
geometry.addAttribute('uv', new THREE.BufferAttribute( uvs, 2) )
// Create a texture loader so we can load our image file
var loader = new THREE.TextureLoader();
// specify the url to the texture
var url = 'https://s3.amazonaws.com/duhaime/blog/tsne-webgl/assets/cat.jpg';
// specify custom uniforms and attributes for shaders
// Uniform types: https://github.com/mrdoob/three.js/wiki/Uniforms-types
var material = new THREE.RawShaderMaterial({
uniforms: {
texture: {
type: 't',
value: loader.load(url)
},
transparent: true,
},
vertexShader: document.getElementById('vertex-shader').textContent,
fragmentShader: document.getElementById('fragment-shader').textContent
});
// Combine our image geometry and material into a mesh
var mesh = new THREE.Mesh(geometry, material);
// Set the position of the image mesh in the x,y,z dimensions
mesh.position.set(0,0,0)
// Add the image to the scene
scene.add(mesh);
}
/**
* Render!
**/
function render() {
requestAnimationFrame(render);
renderer.render(scene, camera);
controls.update();
};
var scene = getScene();
var camera = getCamera();
var renderer = getRenderer();
var controls = getControls(camera, renderer);
loadImage();
render();
</script>
</body>
</html>

You need to set
material.transparent = true;
Doing so will force normal alpha-blending to be set automatically by the renderer.
three.js r.95

Related

Threejs fade in/out mesh shader material

I'm learning Threejs and glsl shaders. I want to fade in/out a mesh when adding or removing it from the scene. My mesh is a tubeGeometry (which joins two points from an sphere, taken from this tutorial at the end) and a shaderMaterial. My mesh configuration looks like this.
const tubeSegments = 20;
const path = new CatmullRomCurve3(points);
// Points is an array of vec3
const geom = new TubeGeometry(path, tubeSegments, 0.01, 8, false);
const material = new ShaderMaterial({
vertexShader,
fragmentShader,
side: DoubleSide,
uniforms: {
time: {
value: mesh_fragments_time.get(),
},
color: {
value: new Vector3(1, 1, 0),
},
},
});
const mesh = new Mesh(
geom,
material
);
The vertexShader:
varying vec2 vertexUV;
varying vec3 vertexNormal;
void main(){
vertexUV=uv;
vertexNormal=normalize(normalMatrix * normal);
gl_Position=projectionMatrix*modelViewMatrix*vec4(position,1);
}
The fragmenShader:
varying vec2 vertexUV;
uniform float time;
uniform vec3 color;
void main () {
float dash = sin(vertexUV.x*60. - time);
if (dash>0.) discard;
gl_FragColor=vec4(color,0);
}
Where mesh_fragments_time.get() or time is a number that changes on my requestAnimation and makes the object dashed.
I've tried adding opacity to the shader material and changing it after adding to scene, but doesn't work.
I suppose I have to do that inside of the fragment shader but don't know how. Can someone help me?
R147

Weird Line When Wrapping Image in Fragment Shader

I have made a simple fragment shader using THREE.js. It reads each pixel's coordinate, adds a value to the x component (wrapping to 0 if it goes above 1), and returns the color of a background image at this new location. This has the effect of shifting the background image over and wrapping the part that goes off-screen. The problem is that a dark line sometimes appears where the edge of the image is shifted over:
Here is my code:
var vertexShader = `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`;
var fragmentShader = `
varying vec2 vUv;
uniform sampler2D background;
void main() {
float x = mod(vUv.x + 0.47, 1.0);
float y = vUv.y;
gl_FragColor = texture(background, vec2(x, y));
}
`;
$(document).ready(function() {
var plotElement = $("#plot");
var scene = new THREE.Scene();
var renderer = new THREE.WebGLRenderer();
var camera = new THREE.OrthographicCamera(-0.5, 0.5, 0.5, -0.5, 0, 1000);
renderer.setSize(500, 500);
plotElement.append(renderer.domElement);
var background = new THREE.TextureLoader().load('https://wearablewearyphase.davidbrock1.repl.co/background.png');
var material = new THREE.ShaderMaterial({
vertexShader: vertexShader,
fragmentShader: fragmentShader,
uniforms: {
background: {
value: background
},
},
});
geom = new THREE.PlaneBufferGeometry(1, 1);
mesh = new THREE.Mesh(geom, material);
scene.add(mesh);
camera.z = 1;
function render() {
requestAnimationFrame(render);
renderer.render(scene, camera);
}
render();
});
<!DOCTYPE html>
<html>
<head>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r118/three.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
</head>
<body>
<div id="plot"></div>
</body>
</html>
I am quite sure that the line is not in the original image:
The boundary should not be visible at all because the left and right sides are the same color. Furthermore, the line only appears for some shift distances (49% and 47%, but not 50% or 48%, for example). I have found that if I make the output bigger than the background image than the line disappears, but I would prefer not to do this.
What is causing this line and how can I prevent it?
Note:
I just used the "mod" function as an example. Originally, I had another program (another shader) calculate x and y coordinates for every pixel and save them in another texture as the red and green components. The fragment shader then looked these coordinates up in the image.
I ran into this problem while trying to create an animation like this. The lines started appearing all over the screen and did not look good.
This is happening because the fragment shader interpolates values across pixels. So one pixel column approaches 1.0, the next one is a very squished version of your entire texture between 1.0 - 0.0, and the next one starts over at 0.0.
The easiest way to circumvent this behavior is to set your texture's wrapping mode to THREE.RepeatWrapping and get rid of the mod() so as your texture goes above 1.0, it'll automatically start over from the left again.

Unable to load texture in WebGl

I am able to load RGB colours but not textures. If it could be some settings problem please prompt me.
This is the screenshot of chrome://flags
The HTML code is given :
<!DOCTYPE html>
<meta charset="UTF-8">
<html>
<head>
<title>WebGL Cube with Texture</title>
<script type="x-shader/x-vertex" id="vshader">
attribute vec3 coords;
attribute vec2 texCoords;
uniform vec3 normal;
uniform mat4 modelview;
uniform mat4 projection;
uniform mat3 normalMatrix;
varying vec3 vNormal;
varying vec2 vTexCoords;
void main() {
vec4 coords = vec4(coords,1.0);
vec4 transformedVertex = modelview * coords;
vNormal = normalMatrix * normal;
vTexCoords = texCoords;
gl_Position = projection * transformedVertex;
}
</script>
<script type="x-shader/x-fragment" id="fshader">
precision mediump float;
uniform bool textured;
uniform sampler2D sampler;
varying vec3 vNormal;
varying vec2 vTexCoords;
uniform vec4 color;
void main() {
if (textured) {
vec4 color = texture2D(sampler, vTexCoords);
vec3 unitNormal = normalize(vNormal);
float multiplier = abs(unitNormal.z);
gl_FragColor = vec4( multiplier*color.r, multiplier*color.g, multiplier*color.b, color.a );
}
else {
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0); // use basic white when texture's not there.
}
}
</script>
<script type="text/javascript" src="gl-matrix-min.js"></script>
<script type="text/javascript" src="simple-rotator.js"></script>
<script type="text/javascript">
"use strict";
var gl; // The webgl context.
var aCoords; // Location of the coords attribute variable in the shader program.
var aCoordsBuffer; // Buffer to hold coords.
var aTexCoords; // Location of the texCoords attribute variable in the shader program.
var aTexCoordsBuffer; // Buffer to hold texCoords.
var uProjection; // Location of the projection uniform matrix in the shader program.
var uModelview; // Location of the modelview unifirm matrix in the shader program.
var uNormal; // Location of the normal uniform in the shader program.
var uColor; // Location of the color uniform in the shader program, used only for axes.
var uTextured; // Location of the textured uniform in the shader program.
var uSampler; // Location of the sampler in the shader program.
var uNormalMatrix; // Location of the normalMatrix uniform matrix in the shader program.
var projection = mat4.create(); // projection matrix
var modelview = mat4.create(); // modelview matrix
var normalMatrix = mat3.create(); // matrix, derived from modelview matrix, for transforming normal vectors
var rotator; // A SimpleRotator object to enable rotation by mouse dragging.
var textureID = null; // Texture object, to be created after image has loaded.
/* Draws a colored cube, along with a set of coordinate axes.
* (Note that the use of the above drawPrimitive function is not an efficient
* way to draw with WebGL. Here, the geometry is so simple that it doesn't matter.)
*/
function draw() {
gl.clearColor(0,0,0,1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (document.getElementById("persproj").checked) {
mat4.perspective(projection, Math.PI/4, 1, 2, 10);
}
else {
mat4.ortho(projection,-2.5, 2.5, -2.5, 2.5, 2, 10);
}
gl.uniformMatrix4fv(uProjection, false, projection );
var modelview = rotator.getViewMatrix();
var saveModelview = mat4.clone(modelview);
if (textureID) {
gl.uniform1i( uTextured, 1 ); // Tell shader to use texture and lighting.
gl.bindTexture(gl.TEXTURE_2D, textureID); // Which texture should be used.
gl.uniform1i(uSampler, 0); // Set sampler in shadre to use texture unit zero.
}
else {
gl.uniform1i( uTextured, 0 ); // Cube will appear in plain white.
}
drawFace(modelview) // front face of the cube
mat4.rotateY(modelview,modelview,Math.PI/2); //right face
drawFace(modelview) // front face
mat4.rotateY(modelview,modelview,Math.PI/2); //back face
drawFace(modelview) // front face
mat4.rotateY(modelview,modelview,Math.PI/2); //left face
drawFace(modelview) // front face
modelview = mat4.clone(saveModelview);
mat4.rotateX(modelview,modelview,Math.PI/2);
drawFace(modelview) // top face
mat4.rotateX(modelview,modelview,Math.PI);
drawFace(modelview) // bottom face
}
/**
* Draws the front face of the cube, subject to a modelview transform.
*/
function drawFace(modelview) {
gl.uniformMatrix4fv(uModelview, false, modelview );
mat3.normalFromMat4(normalMatrix, modelview);
gl.uniformMatrix3fv(uNormalMatrix, false, normalMatrix);
gl.uniform3f(uNormal, 0, 0, 1);
gl.drawArrays(gl.TRIANGLE_FAN, 0, 4); // front face
}
/**
* Loads data for the front face of the cube into VBOs.
*/
function createFace() {
var vertices = [ -1,-1,1, 1,-1,1, 1,1,1, -1,1,1 ];
var texCoords = [ 0,0, 2,0, 2,2, 0,2 ];
gl.enableVertexAttribArray(aCoords);
gl.bindBuffer(gl.ARRAY_BUFFER,aCoordsBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
gl.vertexAttribPointer(aCoords, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(aTexCoords);
gl.bindBuffer(gl.ARRAY_BUFFER,aTexCoordsBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW);
gl.vertexAttribPointer(aTexCoords, 2, gl.FLOAT, false, 0, 0);
}
/**
* Load an image from the URL "textures/bridk001.jpg". The image is loade
* asynchronously. When the
*/
function loadTexture() {
var img = new Image();
img.onload = function() {
var id = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D,id);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
gl.generateMipmap(gl.TEXTURE_2D);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT);
textureID = id;
draw();
}
img.src = "./skin.jpg";
}
/* Creates a program for use in the WebGL context gl, and returns the
* identifier for that program. If an error occurs while compiling or
* linking the program, an exception of type String is thrown. The error
* string contains the compilation or linking error. If no error occurs,
* the program identifier is the return value of the function.
*/
function createProgram(gl, vertexShaderSource, fragmentShaderSource) {
var vsh = gl.createShader( gl.VERTEX_SHADER );
gl.shaderSource(vsh,vertexShaderSource);
gl.compileShader(vsh);
if ( ! gl.getShaderParameter(vsh, gl.COMPILE_STATUS) ) {
throw "Error in vertex shader: " + gl.getShaderInfoLog(vsh);
}
var fsh = gl.createShader( gl.FRAGMENT_SHADER );
gl.shaderSource(fsh, fragmentShaderSource);
gl.compileShader(fsh);
if ( ! gl.getShaderParameter(fsh, gl.COMPILE_STATUS) ) {
throw "Error in fragment shader: " + gl.getShaderInfoLog(fsh);
}
var prog = gl.createProgram();
gl.attachShader(prog,vsh);
gl.attachShader(prog, fsh);
gl.linkProgram(prog);
if ( ! gl.getProgramParameter( prog, gl.LINK_STATUS) ) {
throw "Link error in program: " + gl.getProgramInfoLog(prog);
}
return prog;
}
/* Gets the text content of an HTML element. This is used
* to get the shader source from the script elements that contain
* it. The parameter should be the id of the script element.
*/
function getTextContent( elementID ) {
var element = document.getElementById(elementID);
var fsource = "";
var node = element.firstChild;
var str = "";
while (node) {
if (node.nodeType == 3) // this is a text node
str += node.textContent;
node = node.nextSibling;
}
return str;
}
/**
* Initializes the WebGL program including the relevant global variables
* and the WebGL state. Creates a SimpleView3D object for viewing the
* cube and installs a mouse handler that lets the user rotate the cube.
*/
function init() {
try {
var canvas = document.getElementById("glcanvas");
gl = canvas.getContext("webgl");
if ( ! gl ) {
gl = canvas.getContext("experimental-webgl");
}
if ( ! gl ) {
throw "Could not create WebGL context.";
}
var vertexShaderSource = getTextContent("vshader");
var fragmentShaderSource = getTextContent("fshader");
var prog = createProgram(gl,vertexShaderSource,fragmentShaderSource);
gl.useProgram(prog);
aCoords = gl.getAttribLocation(prog, "coords");
aTexCoords = gl.getAttribLocation(prog, "texCoords");
uModelview = gl.getUniformLocation(prog, "modelview");
uProjection = gl.getUniformLocation(prog, "projection");
uSampler = gl.getUniformLocation(prog, "sampler");
uNormal = gl.getUniformLocation(prog, "normal");
uColor = gl.getUniformLocation(prog, "color");
uTextured = gl.getUniformLocation(prog, "textured");
uNormalMatrix = gl.getUniformLocation(prog, "normalMatrix");
aCoordsBuffer = gl.createBuffer();
aTexCoordsBuffer = gl.createBuffer();
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE); // no need to draw back faces
document.getElementById("persproj").checked = true;
rotator = new SimpleRotator(canvas,draw);
rotator.setView( [2,2,5], [0,1,0], 6 );
}
catch (e) {
document.getElementById("message").innerHTML =
"Could not initialize WebGL: " + e;
return;
}
createFace();
loadTexture();
draw();
}
</script>
</head>
<body onload="init()" style="background-color:#DDD">
<h2>A Cube With a Brick Texture</h2>
<p id=message>Drag the mouse on the canvas to rotate the view.</p>
<p>
<input type="radio" name="projectionType" id="persproj" value="perspective" onchange="draw()">
<label for="persproj">Perspective projection</label>
<input type="radio" name="projectionType" id="orthproj" value="orthogonal" onchange="draw()" style="margin-left:1cm">
<label for="orthproj">Orthogonal projection</label>
<button onclick="rotator.setView( [2,2,5], [0,1,0], 6 ); draw()" style="margin-left:1cm">Reset View</button>
</p>
<noscript><hr><h3>This page requires Javascript and a web browser that supports WebGL</h3><hr></noscript>
<div>
<canvas width=600 height=600 id="glcanvas" style="background-color:red"></canvas>
</div>
</body>
</html>
All i get as an output is
The other functions are loading fine. The file paths are correct.
The issue is you need to run a simple web server for WebGL dev. It should take you about 2 minutes to get setup
See this
Try defining the minification and magnification parameters for the texture object.
eg:
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
Use the appropriate value for min and mag filter, based on your project requirement.

three.js point clouds, BufferGeometry and incorrect transparency

The problem: I have a point cloud with quite a lot of data points (around one million). When I apply transparency to the rendered points, the transparency somehow does not show what is behind the rendered points
As you can see in the example of the marked point, it does not show what it should, it is as if there is a problem with the buffering.
I use three.js to create a point cloud using the following "setup":
The renderer:
this.renderer = new THREE.WebGLRenderer({
canvas: this.canvas,
antialias: true
});
The material:
this.pointMaterial = new THREE.ShaderMaterial( {
uniforms: {
time: { type: "f", value: 1.0 }
},
vertexShader: document.getElementById('vertexShader').textContent,
fragmentShader: document.getElementById('fragmentShader').textContent,
transparent: true
});
The vertex shader:
attribute float size;
attribute float opacity;
attribute vec3 color;
varying vec3 vColor;
varying float vOpacity;
void main() {
vColor = color;
vOpacity = opacity;
vec4 mvPosition = modelViewMatrix * vec4(position, 1.0);
gl_PointSize = size * (500.0 / length(mvPosition.xyz));
gl_Position = projectionMatrix * mvPosition;
}
The fragment shader:
uniform float time;
varying vec3 vColor;
varying float vOpacity;
void main() {
gl_FragColor = vec4(vColor, vOpacity);
}
The geometry (where I left out the part where I populate the arrays):
var bufferGeometry = new THREE.BufferGeometry();
var vertices = new Float32Array(vertexPositions.length * 3);
var colors = new Float32Array(vertexColors.length * 3);
var sizes = new Float32Array(vertexSizes.length);
var opacities = new Float32Array(vertexOpacities.length);
bufferGeometry.addAttribute('position', new THREE.BufferAttribute(vertices, 3));
bufferGeometry.addAttribute('color', new THREE.BufferAttribute(colors, 3));
bufferGeometry.addAttribute('size', new THREE.BufferAttribute(sizes, 1));
bufferGeometry.addAttribute('opacity', new THREE.BufferAttribute(opacities, 1));
this.points = new THREE.Points(bufferGeometry, this.pointMaterial);
this.scene.add(this.points);
I tried this with the built-in point material, where the same happens
this.pointMaterial = new THREE.PointsMaterial({
size: this.pointSize,
vertexColors: THREE.VertexColors,
transparent: true,
opacity: 0.25
});
Is this a but, expected behaviour or am I doing something wrong?
The way the alpha blending equation works is that a source colour for geometry that is behind is covered by a destination colour for geometry that is in front. This means you need to render your transparent geometry in sorted order from back to front, so that geometry in front will correctly blend with geometry behind.
If all you have is transparent geometry then you can just disable depth testing, render in reverse depth sorted order, and it will work. If you have opaque geometry as well then you need to first render all opaque geometry normally, then disable depth writing (not testing) and render transparent geometry in reverse depth sorted order, then re-enable depth writing.
Here are some answers to similar questions if you're interested in learning a bit more.

WebGL Fragment Shader constructor error - Too many arguments

I'm working on a Javascript/OpenGL(WebGL) program that's supposed to make a 3D cube with a texture colored to look like a Rubik's cube. So far, I have the texture mapped just fine, and every point in the cube seems to be showing properly.
However, I'm having a tough time getting the color to display on it without getting some errors from the fragment shader. At the moment, I get this one error I've been trying to figure out with no luck. Here's the error message.
Error: fragment shader compiler: ERROR: 0:11: 'constructor' : too many arguments
Here's my fragment shader code to look at specifically.
<script id="fragment-shader" type="x-shader">
precision mediump float;
uniform sampler2D image0;
varying vec2 textureCoordinatesV;
varying vec4 pointColorV;
void main()
{
// extract the RGBA color from image at given coordinates.
vec4 oldColor = texture2D( image0, textureCoordinatesV );
vec4 newColor;
newColor = vec4(oldColor.r * pointColorV, oldColor.g * pointColorV, oldColor.b * pointColorV, 1.0);
gl_FragColor = newColor;
}
</script>
Here's the image I'm using for this program.
http://tinypic.com/view.php?pic=2rpbbqq&s=9#.Vkj3jPmrTIU
And here's my code in it's entirety. I am aware of three.js to render 3D objects in WebGL, but this current approach is for me to get a better understanding of how to render the object with existing tools. Also, if you're having trouble seeing the cube, try running this program in Firefox.
There is a Vector4.js and Matrix4.js I'm using with this program, but from what I understand, they don't have anything to do with the current error message I'm receiving. Vector4 is just a script that establishes a Vector4 structure for me to use to hold vertices to render, and Matrix4 is a script for moving those vertices once they're established. If need be, I will post these files in the comments.
Any and all help would be appreciated on figuring out why this error keeps coming up, thank you very much.
<html>
<head>
<title>Template WebGL file</title>
</head>
<body onload="main()">
<canvas id="myCanvas" width="400" height="400"></canvas>
<!-- Load external file with helper setup functions. -->
<script src="webgl-utils.js"></script>
<script src="Vector4.js"></script>
<script src="Matrix4.js"></script>
<!-- vertex shader code -->
<script id="vertex-shader" type="x-shader">
attribute vec4 pointPosition;
uniform mat4 transformation;
attribute vec4 pointColorA;
varying vec4 pointColorV;
attribute vec2 textureCoordinatesA;
varying vec2 textureCoordinatesV;
void main()
{
gl_Position = transformation * pointPosition;
textureCoordinatesV = textureCoordinatesA;
pointColorV = pointColorA;
}
</script>
<!-- fragment shader code -->
<script id="fragment-shader" type="x-shader">
precision mediump float;
uniform sampler2D image0;
varying vec2 textureCoordinatesV;
varying vec4 pointColorV;
void main()
{
// extract the RGBA color from image at given coordinates.
vec4 oldColor = texture2D( image0, textureCoordinatesV );
vec4 newColor;
newColor = vec4(oldColor.r * pointColorV, oldColor.g * pointColorV, oldColor.b * pointColorV, 1.0);
gl_FragColor = newColor;
}
</script>
<!-- main Javascript program -->
<script>
// DECLARE GLOBAL JAVASCRIPT VARIABLES
var canvas = document.getElementById('myCanvas');
var gl; // the WebGL context
var transformationData, transformationAddress;
var x, y, z, angle, scale, time;
function main()
{
// STANDARD STARTUP CODE
gl = setupWebGL(canvas);
var program = initShaders( gl, "vertex-shader", "fragment-shader" );
gl.useProgram( program );
gl.enable(gl.DEPTH_TEST);
// WRITE MAIN JAVASCRIPT CODE HERE
// [x,y,z, r,g,b]
var v0 = [0,0,0]; // back; black
var v1 = [0,0,1]; // z axis; blue
var v2 = [0,1,0]; // y axis; green
var v3 = [0,1,1]; // y+z; cyan
var v4 = [1,0,0]; // x axis; red
var v5 = [1,0,1]; // x+z; magenta
var v6 = [1,1,0]; // x+y; yellow
var v7 = [1,1,1]; // all; white
var uv0 = [0,0];
var uv1 = [0,1];
var uv2 = [1,0];
var uv3 = [1,1];
var cr = [1,0,0]; //red
var cg = [0,1,0]; //green
var cb = [0,0,1]; //blue
var cy = [1,1,0]; //yellow
var co = [1,0.5,0]; //orange
var cw = [1,1,1]; //white
var data = [];
// left x2, right x2, front x2, back x2, up x2, down x2
data = data.concat( v7,uv3,cr,v3,uv1,cr,v1,uv0,cr, v7,uv3,cr,v1,uv0,cr,v5,uv2,cr, // front
v3,uv3,cy,v2,uv1,cy,v0,uv0,cy, v3,uv3,cy,v0,uv0,cy,v1,uv2,cy, // left
v6,uv3,cb,v2,uv1,cb,v3,uv0,cb, v6,uv3,cb,v3,uv0,cb,v7,uv2,cb, // up
v2,uv3,co,v6,uv1,co,v4,uv0,co, v2,uv3,co,v4,uv0,co,v0,uv2,co, // back
v6,uv3,cw,v7,uv1,cw,v5,uv0,cw, v6,uv3,cw,v5,uv0,cw,v4,uv2,cw, // right
v5,uv3,cg,v1,uv1,cg,v0,uv0,cg, v5,uv3,cg,v0,uv0,cg,v4,uv2,cg, v0 ); // down
var attributeArray = new Float32Array( data );
var attributeBuffer = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, attributeBuffer );
gl.bufferData( gl.ARRAY_BUFFER, attributeArray, gl.STATIC_DRAW );
var pointPositionAddress =
gl.getAttribLocation( program, "pointPosition" );
var textureCoordinatesAddress =
gl.getAttribLocation( program, "textureCoordinatesA" );
var pointColorAddress =
gl.getAttribLocation( program, "pointColorA" );
var BPE = attributeArray.BYTES_PER_ELEMENT;
gl.vertexAttribPointer(
pointPositionAddress, 3, gl.FLOAT, false, 8*BPE, 0*BPE );
gl.enableVertexAttribArray( pointPositionAddress );
gl.vertexAttribPointer(
textureCoordinatesAddress, 3, gl.FLOAT, false, 8*BPE, 3*BPE );
gl.enableVertexAttribArray( textureCoordinatesAddress );
gl.vertexAttribPointer(
pointColorAddress, 3, gl.FLOAT, false, 8 * BPE, 4*BPE);
gl.enableVertexAttribArray(pointColorAddress);
transformationAddress = gl.getUniformLocation( program, "transformation" );
transformationData = new Matrix4().setIdentity();
x = 0;
y = 0;
z = 0;
angle = 0;
scale = 1;
time = 0;
// set up texture buffer
var textureBuffer = gl.createTexture();
var imageAddress = gl.getUniformLocation( program, "image0" );
var imageData = new Image();
// when image is done loading run some code (load into GPU)
imageData.onload = function() {
loadTexture0(textureBuffer, imageData, imageAddress); }
// start loading the image
imageData.src = "DDBingoGrid.png";
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
loop();
}
// OTHER FUNCTIONS
function loadTexture0( tb, id, ia )
{
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
gl.activeTexture( gl.TEXTURE0 );
gl.bindTexture(gl.TEXTURE_2D, tb);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB,
gl.UNSIGNED_BYTE, id);
gl.uniform1i( ia, 0 );
}
function loop()
{
update();
render();
setTimeout( loop, 16 );
}
// update JavaScript variables; send new data to GPU
function update()
{
time += 0.016; // 60 FPS!
x += 0.00;
y += 0.00;
z += 0.00;
angle += 0.01;
scale += 0.00;
var model = new Matrix4();
var t = new Matrix4();
t.setTranslation(-1/2, 0, -1/2);
var rx = new Matrix4();
rx.setRotationX( angle );
var ry = new Matrix4();
ry.setRotationY(angle);
model = rx.multiply(ry).multiply(t);
var camera = new Matrix4();
camera.setTranslation(0,0,6);
var view = camera.inverse();
var projection = new Matrix4();
projection.setPerspective( 45, 1, 0.1, 100 );
transformationData = projection.multiply(view).multiply(model);
gl.uniformMatrix4fv( transformationAddress, false, transformationData.toArray() );
}
// draw all the things
function render()
{
gl.clear( gl.COLOR_BUFFER_BIT );
gl.clear( gl.DEPTH_BUFFER_BIT );
gl.drawArrays( gl.TRIANGLES, 0, 36 );
}
</script>
</body>
</html>
The problem lies in this line:
newColor = vec4(oldColor.r * pointColorV,
oldColor.g * pointColorV,
oldColor.b * pointColorV,
1.0);
In glsl, the float*vector operation returns a vector. Since pointColorV is a vector, you try to pass three vec4 objects into the vec4 constructor, which is not possible. You can solve that by adding the correct swizzle operator .r/.g/.b after pointColorV. But the better option is to write the whole thing as one operation:
newColor = vec4(oldColor.rgb * pointColorV.rgb, 1.0);

Categories

Resources