Is it possible to easily create a stereoscopic 3D view of my scene, without resorting to three.js?
I thought about 2 canvas or two viewports, but I don't know if it's possible to do so, so I've started by trying to create a new viewport, but it just stays black and only shows the second.
function drawScene() {
gl.viewport(0, 0, gl.viewportWidth/2, gl.viewportHeight);
mat4.frustum(-24.0, 24.0, -11.0, 25.0, -100.0, 100.0, pMatrix);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT);
if(perspective){
mat4.perspective(38.5, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, pMatrix);
}
if(perspectiveTP){
mat4.perspective(53.13, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, pMatrix);
}
if(orthogonal){
mat4.ortho(-24.0, 24.0, -11.0, 25.0, -100.0, 100.0, pMatrix);
}
mat4.identity(mvMatrix);
if(perspectiveTP){
mat4.lookAt([camX+frogH,camY,camZ+frogV],[frogH,1,frogV],[0,1,0], mvMatrix);
}
if(perspective){
mat4.lookAt([-12,50,17],[-12,0,17],[0,0,1], mvMatrix);
}
if(orthogonal){
mat4.lookAt([-12,52.5,10],[-12,0,10],[0,0,1], mvMatrix);
}
mat4.identity(pMatrix);
gl.viewport(gl.viewportWidth/2, 0, gl.viewportWidth/2, gl.viewportHeight);
mat4.frustum(-24.0, 24.0, -11.0, 25.0, -100.0, 100.0, pMatrix);
if(perspective){
mat4.perspective(38.5, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, pMatrix);
}
if(perspectiveTP){
mat4.perspective(53.13, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, pMatrix);
}
if(orthogonal){
mat4.ortho(-24.0, 24.0, -11.0, 25.0, -100.0, 100.0, pMatrix);
}
mat4.identity(mvMatrix);
if(perspectiveTP){
mat4.lookAt([camX+frogH,camY,camZ+frogV],[frogH,1,frogV],[0,1,0], mvMatrix);
}
if(perspective){
mat4.lookAt([-12,50,17],[-12,0,17],[0,0,1], mvMatrix);
}
if(orthogonal){
mat4.lookAt([-12,52.5,10],[-12,0,10],[0,0,1], mvMatrix);
}
(...)
EDIT: I've simply created a drawSceneLeft and a drawSceneRight, but not sure if it's the right way to achieve what I'm trying to do, any help is still welcome!
What is your draw function doing? If it's clearing the canvas then you're basically drawing one half, erasing the entire canvas, then drawing the other half.
gl.viewport only sets vertex math related functions. It doesn't stop things from drawing outside the viewport. Specifically it tells WebGL how to convert from clipspace back into pixels and it clips vertex calculations. So for example if you were to draw POINTS and you set gl_PointSize to 100 those points could bleed out of your viewport. A point whose center is outside the viewport won't get drawn at all (it's clipped) but a point whose center is on the edge inside of the viewport will be drawn. Only vertex calculation is clipped and the point is inside so it's not clipped. Once it's passed that test the primitive is still rendered, in this case a 100x100 pixel POINT of which ~50 pixels are outside the viewport.
On the other hand. gl.scissor does clip during rasterization. So, enable the scissor test gl.enable(gl.SCISSOR_TEST) and set the scissor gl.scissor(x, y, width, height) and now that same POINT above would be clipped. gl.clear will also be clipped.
TL;DR: to render multiple viewports you need to set gl.viewport and also enable and set the scissor.
Related
I have a small program I'm working on to render sprites with 2D transformations, link here. My problem is that I'm trying to render a 100px by 100px square, but it's being stretched into a rectangle. I have absolutely zero idea what the offending code is, but here's some relevant pieces.
const position = gl.createBuffer()
gl.bindBuffer(gl.ARRAY_BUFFER, position)
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-w/2, h/2,
w/2, h/2,
-w/2, -h/2,
w/2, -h/2
]), gl.STATIC_DRAW)
gl.bindBuffer(gl.ARRAY_BUFFER, position)
gl.vertexAttribPointer(attrib.vertexPosition,
2, gl.FLOAT, false, 0, 0)
gl.enableVertexAttribArray(attrib.vertex)
gl.uniformMatrix2fv(uniform.transformMatrix, false, transform)
gl.uniform2f(uniform.translation, x+w/2, y+h/2)
gl.uniform2f(uniform.screenRes, gl.canvas.width, gl.canvas.height)
Vertex shader:
attribute vec2 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat2 uTransformMatrix;
uniform vec2 uTranslation;
uniform vec2 uScreenRes;
varying vec2 vTextureCoord;
void main() {
gl_Position = vec4(2.0 * (uTransformMatrix * aVertexPosition + uTranslation) / uScreenRes - 1.0, 1.0, 1.0);
vTextureCoord = aTextureCoord;
}
Feel free to toy around with the variables in the pen, especially the canvas dimensions; when you scale a dimension down, that dimension of the sprite scales up, and vice versa.
P.S. I'm not concerned about how the texture is inverted. I'm shelving that for later.
Your code is correct, however you forgot to specify the viewport.
Add this right before you make any draw calls (in your case, ideally after gl.clear())
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height)
The WebGLRenderingContext.viewport() method of the WebGL API sets the
viewport, which specifies the affine transformation of x and y from
normalized device coordinates to window coordinates.
There is something strange going on, i am drawing a sphere dynamically using lesson11 of github.com on link http://learningwebgl.com/blog/?p=1253 ,
By dynamically I mean I am taking latitudeBands and longitudeBands from the user at run time, and he may change them at run time to form a new sphere. (The user can select the latitudeBands and longitudeBands values at run time from the given UI option in the HTML.)
I am creating sphere using those latitudeBands and longitudeBands using the same concepts as on this link and it works fine and which i auto rotate by doing like this:
//rotation is at the end of the loop method 'tick'
// Per-frame render loop: draws the textured sphere and advances its rotation.
function tick() {
// Schedule the next frame first, then render this one.
// NOTE(review): if tick() is started again when the user rebuilds the sphere,
// multiple requestAnimFrame loops stack up and the rotation step below runs
// several times per frame -- a likely cause of the increasing rotation speed;
// verify that tick() is only ever kicked off once.
requestAnimFrame(tick);
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
mat4.perspective(45, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, pMatrix);
gl.uniform1i(shaderProgram.useLightingUniform, false);
// Model-view: push the sphere back 6 units, then apply the accumulated rotation.
mat4.identity(mvMatrix);
mat4.translate(mvMatrix, [0, 0, -6]);
mat4.multiply(mvMatrix, RotationMatrix);
// Bind the sphere texture to unit 0.
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, imageTexture);
gl.uniform1i(shaderProgram.samplerUniform, 0);
// Point the attribute arrays at the (re)built sphere buffers.
gl.bindBuffer(gl.ARRAY_BUFFER, VertexPositionBuffer);
gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, VertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, VertexTextureCoordBuffer);
gl.vertexAttribPointer(shaderProgram.textureCoordAttribute, VertexTextureCoordBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, VertexNormalBuffer);
gl.vertexAttribPointer(shaderProgram.vertexNormalAttribute, VertexNormalBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, VertexIndexBuffer);
setMatrixUniforms();
/*Rotation code is below*/
// Build a small incremental rotation (0.5 degrees about Y) and fold it into
// the accumulated RotationMatrix (old gl-matrix calling convention:
// multiply(a, b, dest)).
var newRotationMatrix = mat4.create();
mat4.identity(newRotationMatrix);
mat4.rotate(newRotationMatrix, degToRad(5 / 10), [0, 1, 0]);
mat4.multiply(newRotationMatrix, RotationMatrix, RotationMatrix);
gl.drawElements(gl.TRIANGLES, VertexIndexBuffer.numItems, gl.UNSIGNED_SHORT, 0);
}
Where is the problem ?
The problem is that the first time I select the values dynamically it works fine. But when I select other values for latitudeBands and longitudeBands at run time from the UI (a second time or more), the rotation becomes faster than the sphere's previous rotation,
and speed of rotation keeps on increasing as i select again and again dynamic latitudeBands and longitudeBands values.
Why this strange behavior, why it increases the speed of rotation for newly formed sphere by selected latitudeBands and longitudeBands, The rotation speed is supposed to be same as i re-draw a new sphere with new latitudeBands and longitudeBands values at same position?
How to avoid it ?
EDIT1:
var RotationMatrix = mat4.create();
mat4.identity(RotationMatrix);
and setMatrixUniforms() is
// Uploads the current projection, model-view and normal matrices to the shader.
function setMatrixUniforms()
{
gl.uniformMatrix4fv(shaderProgram.pMatrixUniform, false, pMatrix);
gl.uniformMatrix4fv(shaderProgram.mvMatrixUniform, false, mvMatrix);
// Normal matrix = transpose(inverse(upper-left 3x3 of the model-view)).
var normalMatrix = mat3.create();
mat4.toInverseMat3(mvMatrix, normalMatrix);
// NOTE(review): mat4.transpose is applied to a mat3 here -- presumably
// mat3.transpose was intended; verify against the gl-matrix version in use.
mat4.transpose(normalMatrix);
gl.uniformMatrix3fv(shaderProgram.nMatrixUniform, false, normalMatrix);
}
Could it be the reason that some matrice is not refreshed, or the rotation keeps on increasing with previous value?
I think mat4.multiply(newRotationMatrix, RotationMatrix, RotationMatrix); should be mat4.multiply(newRotationMatrix, RotationMatrix, newRotationMatrix); ?
I'm trying to get rendering to a floating point texture working in WebGL on iOS Safari (not in a native app). I have managed to get iOS to read a manually (e.g. from JavaScript) created floating point texture, however when I create a framebuffer of floating point type and use the GPU to render into it, it does not work.
I've isolated the issue to code that renders to a floating point texture, which is then passed to another shader to be displayed. Here is what the result looks like applied to a cube:
The render to texture draws a green square, half the size of the texture, which is then applied to each side of the cube.
This all works perfectly fine on both desktop and iOS WebGL as long as the type of the texture that the green square is rendered to is the standard unsigned byte type. However, changing the type to floating point causes the render to texture to fail on iOS devices (while continuing to work on desktop browsers). The texture is empty, as if nothing had been rendered to it.
I have created an example project here to demonstrate the issue: https://github.com/felixpalmer/render-2-texture
Changing the precision of the shaders using the THREE.Renderer.precision setting does not make a difference
As far as I know no iOS device supports rendering to a floating point texture (nor do most mobile devices at this point in time 3/2015)
My understanding of the WebGL spec is
OES_texture_float: Allows you to create and read from 32bit float textures but rendering to a floating point is device dependent.
OES_texture_float_linear: Allows linear filter floating point textures. If this doesn't exist and OES_texture_float does then you can only use gl.NEAREST for floating point textures.
OES_texture_half_float and OES_texture_half_float_linear are the same as above except for half float textures.
The traditional way to see if you can render to a floating point texture in WebGL, assuming OES_texture_float exists, is to create a framebuffer, attach a floating point texture to it, then call gl.checkFramebufferStatus. If it returns gl.FRAMEBUFFER_COMPLETE then you can, if not then you can't. Note: This method should work regardless of the next paragraph.
The spec was updated so you could also check WebGL extensions to find out if it's possible to render to a floating point texture. The extension WEBGL_color_buffer_float is supposed to tell you you can render to floating point textures. The extension EXT_color_buffer_half_float is the same for half float textures. I know of no browser that actually shows these extensions though yet they support floating point rendering if the hardware supports it.
For example my 2012 Retina MBP on Chrome 41 reports
gl = document.createElement("canvas").getContext("webgl").getSupportedExtensions()
["ANGLE_instanced_arrays",
"EXT_blend_minmax",
"EXT_frag_depth",
"EXT_shader_texture_lod",
"EXT_sRGB",
"EXT_texture_filter_anisotropic",
"WEBKIT_EXT_texture_filter_anisotropic",
"OES_element_index_uint",
"OES_standard_derivatives",
"OES_texture_float",
"OES_texture_float_linear",
"OES_texture_half_float",
"OES_texture_half_float_linear",
"OES_vertex_array_object",
"WEBGL_compressed_texture_s3tc",
"WEBKIT_WEBGL_compressed_texture_s3tc",
"WEBGL_debug_renderer_info",
"WEBGL_debug_shaders",
"WEBGL_depth_texture",
"WEBKIT_WEBGL_depth_texture",
"WEBGL_lose_context",
"WEBKIT_WEBGL_lose_context"]
Firefox 36 reports
gl = document.createElement("canvas").getContext("webgl").getSupportedExtensions().join("\n")
"ANGLE_instanced_arrays
EXT_blend_minmax
EXT_frag_depth
EXT_sRGB
EXT_texture_filter_anisotropic
OES_element_index_uint
OES_standard_derivatives
OES_texture_float
OES_texture_float_linear
OES_texture_half_float
OES_texture_half_float_linear
OES_vertex_array_object
WEBGL_compressed_texture_s3tc
WEBGL_depth_texture
WEBGL_draw_buffers
WEBGL_lose_context
MOZ_WEBGL_lose_context
MOZ_WEBGL_compressed_texture_s3tc
MOZ_WEBGL_depth_texture"
The browser vendors are busy implementing WebGL 2.0 and given the gl.checkFramebufferStatus method works there's no pressure to spend time making the other extension strings appear.
Apparently some iOS devices support EXT_color_buffer_half_float so you could try creating a half float texture, attach it to a framebuffer and check its status then see if that works.
Here's a sample to check support. Running it on my iPadAir2 and my iPhone5s I get
can make floating point textures
can linear filter floating point textures
can make half floating point textures
can linear filter half floating point textures
can **NOT** render to FLOAT texture
successfully rendered to HALF_FLOAT_OES texture
which is exactly what we expected.
"use strict";
// Appends one line of output to the page as a new <div>.
function log(msg) {
  const line = document.createElement("div");
  line.appendChild(document.createTextNode(msg));
  document.body.appendChild(line);
}
// Reverse-looks-up a numeric GL value to its constant name on the context.
// Uses for...in deliberately so constants inherited from the context's
// prototype are found as well. Falls back to a hex string when no name matches.
function glEnum(gl, v) {
  let found;
  for (const name in gl) {
    if (gl[name] === v) {
      found = name;
      break;
    }
  }
  return found !== undefined ? found : "0x" + v.toString(16);
}
// Feature-tests float / half-float texture support, then attempts an actual
// render into a texture of each supported format, logging every result.
window.onload = function() {
// Get A WebGL context
var canvas = document.getElementById("c");
var gl = canvas.getContext("webgl");
if (!gl) {
return;
}
// Queries an extension, logs whether it is available, and returns it.
function getExt(name, msg) {
var ext = gl.getExtension(name);
log((ext ? "can " : "can **NOT** ") + msg);
return ext;
}
var testFloat = getExt("OES_texture_float", "make floating point textures");
getExt("OES_texture_float_linear", "linear filter floating point textures");
var testHalfFloat = getExt("OES_texture_half_float", "make half floating point textures");
getExt("OES_texture_half_float_linear", "linear filter half floating point textures");
// Constant from the OES_texture_half_float spec; not exposed on the context.
gl.HALF_FLOAT_OES = 0x8D61;
// setup GLSL program
var program = webglUtils.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var colorLoc = gl.getUniformLocation(program, "u_color");
// provide texture coordinates for the rectangle.
// Two triangles covering the whole clip-space quad.
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1.0, -1.0,
1.0, -1.0,
-1.0, 1.0,
-1.0, 1.0,
1.0, -1.0,
1.0, 1.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
// A 1x1 white texture; sampled in pass 1 so the output is just u_color.
var whiteTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, whiteTex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
new Uint8Array([255, 255, 255, 255]));
// Tries to render INTO a 1x1 texture of the given type (gl.FLOAT or
// gl.HALF_FLOAT_OES) and verifies the result by rendering it back out.
function test(format) {
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, format, null);
// NEAREST filtering so the *_linear extensions are not required here.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
// The traditional render-to-float capability check.
var status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
log("can **NOT** render to " + glEnum(gl, format) + " texture");
return;
}
// Draw the rectangle.
// Pass 1: write [0, 10, 20, 1] into the texture under test -- components
// greater than 1 can only survive in a floating point render target.
gl.bindTexture(gl.TEXTURE_2D, whiteTex);
gl.uniform4fv(colorLoc, [0, 10, 20, 1]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Pass 2: draw that texture to the canvas, scaled back down so the
// expected readback is approximately [0, 255, 255, 255].
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.clearColor(1, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.uniform4fv(colorLoc, [0, 1/10, 1/20, 1]);
gl.drawArrays(gl.TRIANGLES, 0, 6);
var pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
// Allow a little slack for float -> byte rounding.
if (pixel[0] !== 0 ||
pixel[1] < 248 ||
pixel[2] < 248 ||
pixel[3] < 254) {
log("FAIL!!!: Was not able to actually render to " + glEnum(gl, format) + " texture");
} else {
log("succesfully rendered to " + glEnum(gl, format) + " texture");
}
}
if (testFloat) {
test(gl.FLOAT);
}
if (testHalfFloat) {
test(gl.HALF_FLOAT_OES);
}
}
canvas {
border: 1px solid black;
}
<script src="//webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<canvas id="c" width="16" height="16"></canvas>
<!-- vertex shader -->
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
void main() {
gl_Position = a_position;
}
</script>
<!-- fragment shader -->
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
uniform vec4 u_color;
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, vec2(0.5, 0.5)) * u_color;
}
</script>
Does WebGL have a zBuffer or depth?
So here's my problem:
The cube is being cropped or not being rendered properly. Setting the 'z' coordinates beside 0 and 1 results to object / geometry being cropped / cut.
Additionally, I've already implemented this in C++ and it works properly; I'm just porting it to WebGL / OpenGL ES.
Here's my persepective view configuration:
mat4.perspective(45.0, gl.viewportWidth / gl.viewportHeight, 0.1, 1000.0, pMatrix);
also tried setting the zFar to a much more larger value but the result are the same.
Here's the code that I used for vector translation:
mat4.translate(mvMatrix, mvMatrix, [0.0, 0.0, 2.0]);
and also tried this to check if there's a problem on the mat4.translate function this:
var x = 0.0, y = 0.0, z = 2.0;
var position = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
x, y, z, 1,
];
mat4.multiply(mvMatrix, mvMatrix, position);
Also check and tried setting the values directly in Vertex Shader:
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition.x, aVertexPosition.y, 2.0, 1.0);
Update:
Also set to enabled the DEPTH_TEST.
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LEQUAL);
Finally, I solved it, the cause of the problem is due to invalid argument parameters and outdated tutorials on the internet regarding usage of the gl-matrix.js library.
mat4.perspective(45.0, gl.viewportWidth / gl.viewportHeight, 0.1, 1000.0, pMatrix);
whereas of the new gl-matrix parameters should be like this:
mat4.perspective(pMatrix, 45.0, gl.viewportWidth / gl.viewportHeight, 0.1, 1000.0);
After checking the correct documentation of the JS library.
If you apply a projection, you normally look from the origin in the direction of the negative z-axis. So I think you need a translation by a negative value in z-direction to get your geometry in view, while you translate by [0.0, 0.0, 2.0].
You say you're trying to draw a cube, but your vertex shader hardwires all z-coordinates to 2.0. You can't really get any kind of 3-dimensional shape if you're using the same z-coordinate for all vertices. Well, you technically could, but I doubt that this is what you want.
I created a simple scene with a cube moving parallel to the x-axis. Everything works as expected until I rotate the camera around the y-axis. Then the cube follows this rotation and moves parallel to the screen (x-axis in camera coordinates).
Again the initial setup:
Camera at [0, 2, 10] looking at [0, 0, 0]
Cube initially placed at [0, 0, 0], moving along the x-axis between [-10, 10]
Why does my camera movement affect the orientation of the cube?
Here is some of the relevant code. I you would like to see more, don't hesitate to ask. I am using glMatrix for vector and matrix operations.
Main drawing routine:
// Clear the canvas before we start drawing on it.
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Use the full window (minus border)
canvas.width = window.innerWidth - 16;
canvas.height = window.innerHeight - 16;
// Set viewport
gl.viewport(0, 0, canvas.width, canvas.height);
// Reset the perspective matrix
cam.aspectRatio = canvas.width / canvas.height;
mat4.perspective(perspectiveMatrix, cam.fovy, cam.aspectRatio, cam.nearPlane, cam.farPlane);
// Create the mvMatrix
mat4.lookAt(mvMatrix, cam.position, cam.poi, cam.up);
// Draw all objects
for (i = 0; i < ObjectStack.length; i++) {
ObjectStack[i].draw();
}
Camera rotation:
// Rotation via yaw and pitch (FPS-style): yaw spins around the camera's up
// vector, pitch around its right vector; both rotate the point of interest
// around the (fixed) camera position.
this.rotateYP = function (yaw, pitch) {
// Rotation speed
var rotSpeed = 0.5;
yaw *= rotSpeed;
pitch *= rotSpeed;
// Update rotation
// Build one quaternion per axis and combine them (yaw applied after pitch).
var quatYaw = quat.create();
quat.setAxisAngle(quatYaw, this.up, degToRad(yaw));
var quatPitch = quat.create();
quat.setAxisAngle(quatPitch, this.right, degToRad(pitch));
var quatCombined = quat.create();
quat.multiply(quatCombined, quatYaw, quatPitch);
// Update camera vectors
// Rotate the view vector (poi - position) and move the poi accordingly;
// setPOI() re-derives view/right/up from the new point of interest.
var tmp = vec3.create();
vec3.subtract(tmp, this.poi, this.position);
vec3.transformQuat(tmp, tmp, quatCombined);
vec3.add(tmp, this.position, tmp);
this.setPOI(tmp);
};
My setPOI() method (POI = point of interest):
// Re-targets the camera at a new point of interest and rebuilds the
// orthonormal view / right / up basis from it.
this.setPOI = function (poi) {
// Set new poi
vec3.copy(this.poi, poi);
// Set new view vector
vec3.subtract(this.view, poi, this.position);
vec3.normalize(this.view, this.view);
// Set new right vector
// NOTE(review): right is derived against the world up [0, 1, 0]; this cross
// product degenerates when the view direction is (anti)parallel to it --
// i.e. looking straight up or down. Verify this case cannot occur.
vec3.cross(this.right, this.view, [0.0, 1.0, 0.0]);
vec3.normalize(this.right, this.right);
// Set new up vector
vec3.cross(this.up, this.right, this.view);
vec3.normalize(this.up, this.up);
};
Object draw method for the cube:
// Renders the textured cube: applies the per-object translation on top of the
// current mvMatrix, binds its buffers and issues the indexed draw call.
this.draw = function () {
// Save current mvMatrix
mvPushMatrix();
// Object movement
// NOTE(review): position is folded into the combined model-view matrix
// (mvMatrix already holds the lookAt result at this point); consider keeping
// separate model and view matrices if camera-dependent artifacts appear.
mat4.translate(mvMatrix, mvMatrix, position);
// Object rotation
//mat4.mul(mvMatrix, mvMatrix, orientation);
// Object scaling
// ...
// Set shader
setShader();
// Bind the necessary buffers
gl.bindBuffer(gl.ARRAY_BUFFER, verticesBuffer);
gl.vertexAttribPointer(positionAttribute, 3, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, normalsBuffer);
gl.vertexAttribPointer(normalAttribute, 3, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.vertexAttribPointer(texCoordAttribute, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, vertexIndexBuffer);
// Set active texture
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, cubeTexture);
gl.uniform1i(gl.getUniformLocation(ShaderStack[shader], "uSampler"), 0);
// Send the triangles to the graphics card for drawing
gl.drawElements(gl.TRIANGLES, 36, gl.UNSIGNED_SHORT, 0);
gl.bindTexture(gl.TEXTURE_2D, null);
// Clean up the changed mvMatrix
mvPopMatrix();
};
And finally the setShader() used above:
function setShader() {
var shaderProgram = ShaderStack[shader];
gl.useProgram(shaderProgram);
var pUniform = gl.getUniformLocation(shaderProgram, "uPMatrix");
gl.uniformMatrix4fv(pUniform, false, perspectiveMatrix);
var mvUniform = gl.getUniformLocation(shaderProgram, "uMVMatrix");
gl.uniformMatrix4fv(mvUniform, false, mvMatrix);
var normalMatrix = mat4.create();
mat4.invert(normalMatrix, mvMatrix);
mat4.transpose(normalMatrix, normalMatrix);
var nUniform = gl.getUniformLocation(shaderProgram, "uNormalMatrix");
gl.uniformMatrix4fv(nUniform, false, normalMatrix);
normalAttribute = gl.getAttribLocation(shaderProgram, "aVertexNormal");
gl.enableVertexAttribArray(normalAttribute);
positionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
gl.enableVertexAttribArray(positionAttribute);
texCoordAttribute = gl.getAttribLocation(shaderProgram, "aTextureCoord");
gl.enableVertexAttribArray(texCoordAttribute);
};
Sorry for posting all this code here. If you have any idea, please let me know!
I suspect you answered your question in your own question:
a simple scene with a cube moving parallel to the x-axis ... Then the cube follows this rotation and moves parallel to the screen (x-axis in camera coordinates).
Something like this happening leads me to believe that you applied the translation operation to your model-view matrix, not your model matrix, and from your code, I think I am right:
mat4.translate(mvMatrix, mvMatrix, position);
To fix this, you'll want to separate out your model and your view matrix, apply the translation to your model matrix, and then multiply the result by your view. Let me know how it goes!
If you're still confused by matrices, give this a read:
http://solarianprogrammer.com/2013/05/22/opengl-101-matrices-projection-view-model/