Related
I am trying to preserve the proportions at least a little when the window is resized to smaller sizes at the CodePen here. Currently it turns out to be really hard to see the lines and the interaction on mobile. Do you have a solution for this? Maybe it makes sense to double the scale on resize based on the window but I am a bit lost on how I can implement it.
The responsible part of the JS:
onWindowResize();
window.addEventListener('resize', onWindowResize, false);
}
function onWindowResize(event) {
container.style.height = window.innerHeight+"px";
container.style.width = window.innerWidth+"px";
canvasWidth = container.offsetWidth;
canvasHeight = container.offsetHeight;
//send new size value to the shader and resize the window
uniforms.resolution.value.x = canvasWidth;
uniforms.resolution.value.y = canvasHeight;
//var res = canvasWidth / cols;
//rows = canvasHeight / res;
uniforms.colsrows.value.x = cols;
uniforms.colsrows.value.y = rows;//rows;
renderer.setSize(canvasWidth, canvasHeight);
}
Here is the pen:
//Create var for the contenair, the webGL 3D scene, uniforms to bind into shader and timer
var container;
var camera, scene, renderer;
var uniforms;
var startTime;
var cols = 50.;
var rows = 50.0;
init(); //init scene
animate(); //updateScene
function init() {
//get contenaire
container = document.getElementById('container');
//Create THREE.JS scene and timer
startTime = Date.now();
camera = new THREE.Camera();
camera.position.z = 1;
scene = new THREE.Scene();
//create a simple plance
var geometry = new THREE.PlaneBufferGeometry(16, 9);
//create uniform table which provide all our GLSL binding
uniforms = {
time: { type: "f", value: 1.0 },
resolution: { type: "v2", value: new THREE.Vector2() },
colsrows: {type: "v2", value: new THREE.Vector2()},
mouse: {type: "v2", value: new THREE.Vector2()}
};
//create THREE.JS material
var material = new THREE.ShaderMaterial( {
//set shaders and uniforms into material
uniforms: uniforms,
vertexShader: document.getElementById('vertexShader').textContent,
fragmentShader: document.getElementById('fragmentShader').textContent
} );
//create mesh, add it to the scene
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
//create renderer and add it to the DOM
renderer = new THREE.WebGLRenderer();
container.appendChild(renderer.domElement);
//check window for resize This will give us the proper resolution values to bind
onWindowResize();
window.addEventListener('resize', onWindowResize, false);
}
function onWindowResize(event) {
container.style.height = window.innerHeight+"px";
container.style.width = window.innerWidth+"px";
canvasWidth = container.offsetWidth;
canvasHeight = container.offsetHeight;
//send new size value to the shader and resize the window
uniforms.resolution.value.x = canvasWidth;
uniforms.resolution.value.y = canvasHeight;
//var res = canvasWidth / cols;
//rows = canvasHeight / res;
uniforms.colsrows.value.x = cols;
uniforms.colsrows.value.y = rows;//rows;
renderer.setSize(canvasWidth, canvasHeight);
}
function animate() {
render();
requestAnimationFrame(animate);
}
function render() {
var currentTime = Date.now();
var elaspedSeconds = (currentTime - startTime) / 1000.0;
var maxTime = 4.0;
var normTime = (elaspedSeconds % maxTime) / maxTime;
uniforms.time.value = elaspedSeconds * 1.0;
renderer.render(scene, camera);
}
function move(ev){
mx = ev.clientX
my = ev.clientY;
// console.log(mx+" , "+my);
uniforms.mouse.value.x = mx;
uniforms.mouse.value.y = my;
}
document.addEventListener('mousemove', move)
html, body {
margin: 0;
height: 100%;
background : #1a1a1a;
}
canvas {
display: block;
cursor: none;
}
#container{
background : black;
color : white;
margin: auto;
width : 500px;
height : 500px;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/110/three.min.js"></script>
<div id="container"></div>
<!-- GLSL SCRIPT -->
<!-- vertex shader -->
<script id="vertexShader" type="x-shader/x-vertex">
void main() {
gl_Position = vec4(position, 1.0);
}
</script>
<!-- fragment shader -->
<script id="fragmentShader" type="x-shader/x-fragment">
#define TWO_PI 6.28318530718
#define EPSILON 0.000011
uniform vec2 resolution;
uniform float time;
uniform vec2 colsrows;
uniform vec2 mouse;
float HueToRGB(float f1, float f2, float hue)
{
if (hue < 0.0)
hue += 1.0;
else if (hue > 1.0)
hue -= 1.0;
float res;
if ((6.0 * hue) < 1.0)
res = f1 + (f2 - f1) * 6.0 * hue;
else if ((2.0 * hue) < 1.0)
res = f2;
else if ((3.0 * hue) < 2.0)
res = f1 + (f2 - f1) * ((2.0 / 3.0) - hue) * 6.0;
else
res = f1;
return res;
}
vec3 HSLToRGB(vec3 hsl)
{
vec3 rgb;
if (hsl.y == 0.0)
rgb = vec3(hsl.z); // Luminance
else
{
float f2;
if (hsl.z < 0.5)
f2 = hsl.z * (1.0 + hsl.y);
else
f2 = (hsl.z + hsl.y) - (hsl.y * hsl.z);
float f1 = 2.0 * hsl.z - f2;
rgb.r = HueToRGB(f1, f2, hsl.x + (1.0/3.0));
rgb.g = HueToRGB(f1, f2, hsl.x);
rgb.b= HueToRGB(f1, f2, hsl.x - (1.0/3.0));
}
return rgb;
}
mat2 rotate2d(float _angle){
return mat2(cos(_angle),-sin(_angle),
sin(_angle),cos(_angle));
}
vec2 rotateFrom(vec2 uv, vec2 center, float angle){
vec2 uv_ = uv - center;
uv_ = rotate2d(angle) * uv_;
uv_ = uv_ + center;
return uv_;
}
float random(float value){
return fract(sin(value) * 43758.5453123);
}
float random(vec2 tex){
return fract(sin(dot(tex.xy, vec2(12.9898, 78.233))) * 43758.5453123);
}
vec2 random2D(vec2 uv){
uv = vec2(dot(uv, vec2(127.1, 311.7)), dot(uv, vec2(269.5, 183.3)));
//return -1.0 + 2.0 * fract(sin(uv) * 43758.5453123);
return fract(sin(uv) * 43758.5453123); //return without offset on x, y
}
vec3 random3D(vec3 uv){
uv = vec3(dot(uv, vec3(127.1, 311.7, 120.9898)), dot(uv, vec3(269.5, 183.3, 150.457)), dot(uv, vec3(380.5, 182.3, 170.457)));
return -1.0 + 2.0 * fract(sin(uv) * 43758.5453123);
}
float cubicCurve(float value){
return value * value * (3.0 - 2.0 * value); // custom cubic curve
}
vec2 cubicCurve(vec2 value){
return value * value * (3.0 - 2.0 * value); // custom cubic curve
}
vec3 cubicCurve(vec3 value){
return value * value * (3.0 - 2.0 * value); // custom cubic curve
}
float noise(vec2 uv){
vec2 iuv = floor(uv);
vec2 fuv = fract(uv);
vec2 suv = cubicCurve(fuv);
float dotAA_ = dot(random2D(iuv + vec2(0.0)), fuv - vec2(0.0));
float dotBB_ = dot(random2D(iuv + vec2(1.0, 0.0)), fuv - vec2(1.0, 0.0));
float dotCC_ = dot(random2D(iuv + vec2(0.0, 1.0)), fuv - vec2(0.0, 1.0));
float dotDD_ = dot(random2D(iuv + vec2(1.0, 1.0)), fuv - vec2(1.0, 1.0));
return mix(
mix(dotAA_, dotBB_, suv.x),
mix(dotCC_, dotDD_, suv.x),
suv.y);
}
float noise(vec3 uv){
vec3 iuv = floor(uv);
vec3 fuv = fract(uv);
vec3 suv = cubicCurve(fuv);
float dotAA_ = dot(random3D(iuv + vec3(0.0)), fuv - vec3(0.0));
float dotBB_ = dot(random3D(iuv + vec3(1.0, 0.0, 0.0)), fuv - vec3(1.0, 0.0, 0.0));
float dotCC_ = dot(random3D(iuv + vec3(0.0, 1.0, 0.0)), fuv - vec3(0.0, 1.0, 0.0));
float dotDD_ = dot(random3D(iuv + vec3(1.0, 1.0, 0.0)), fuv - vec3(1.0, 1.0, 0.0));
float dotEE_ = dot(random3D(iuv + vec3(0.0, 0.0, 1.0)), fuv - vec3(0.0, 0.0, 1.0));
float dotFF_ = dot(random3D(iuv + vec3(1.0, 0.0, 1.0)), fuv - vec3(1.0, 0.0, 1.0));
float dotGG_ = dot(random3D(iuv + vec3(0.0, 1.0, 1.0)), fuv - vec3(0.0, 1.0, 1.0));
float dotHH_ = dot(random3D(iuv + vec3(1.0, 1.0, 1.0)), fuv - vec3(1.0, 1.0, 1.0));
float passH0 = mix(
mix(dotAA_, dotBB_, suv.x),
mix(dotCC_, dotDD_, suv.x),
suv.y);
float passH1 = mix(
mix(dotEE_, dotFF_, suv.x),
mix(dotGG_, dotHH_, suv.x),
suv.y);
return mix(passH0, passH1, suv.z);
}
float drawLine(vec2 uv, vec2 p1, vec2 p2, float r)
{
//from https://www.shadertoy.com/view/MtlSDr
vec2 l = p2 - p1;
float L = length(l);
float L2 = L*L;
float d1 = length(uv - p1);
float d2 = length(uv - p2);
float d = min(d1, d2);
float ds = dot(uv - p1, l);
if (ds >= 0.0 && ds <= L2)
{
vec2 n = vec2(-l.y, l.x) / L;
d = min(d, abs(dot(uv - p1, n)));
}
return 1.0 - smoothstep(0.0, 0.01, d - r);
}
vec2 fishey(vec2 uv, vec2 center, float ratio, float dist){
vec2 puv = uv + vec2(1.0);
//center coords
vec2 m = vec2(center.x, center.y/ratio) + vec2(1.0);
//vector from center to current fragment
vec2 d = puv - m;
// distance of pixel from center
float r = sqrt(dot(d, d));
//amount of effect
float power = ( TWO_PI / (2.0 * sqrt(dot(m, m)))) * mix(0.1, 0.4, pow(dist, 0.75));
//radius of 1:1 effect
float bind;
if (power > 0.0) bind = sqrt(dot(m, m));//stick to corners
//else {if (ratio < 1.0) bind = m.x; else bind = m.y;}//stick to borders
//Weird formulas
vec2 nuv;
if (power > 0.0)//fisheye
nuv = m + normalize(d) * tan(r * power) * bind / tan( bind * power);
else if (power < 0.0)//antifisheye
nuv = m + normalize(d) * atan(r * -power * 10.0) * bind / atan(-power * bind * 10.0);
else
nuv = puv;//no effect for power = 1.0
return nuv - vec2(1.0);
}
vec4 addGrain(vec2 uv, float time, float grainIntensity){
float grain = random(fract(uv * time)) * grainIntensity;
return vec4(vec3(grain), 1.0);
}
void main(){
vec2 ouv = gl_FragCoord.xy / resolution.xy;
vec2 uv = ouv;
float ratio = resolution.x / resolution.y;
vec2 nmouse = vec2(mouse.x, mouse.y) / resolution.xy;
nmouse.y = 1.0 - nmouse.y;
float maxDist = 0.35;
float blurEdge = maxDist * 0.5;
float blurEdge2 = maxDist * 1.0;
vec2 mouseToUV = (uv - nmouse) / vec2(1.0, ratio);
float mouseDistance = 1.0 - smoothstep(maxDist - blurEdge, maxDist, length(mouseToUV));
float mouseDistance2 = 1.0 - smoothstep(maxDist - blurEdge2, maxDist, length(mouseToUV));
uv = fishey(uv, nmouse, ratio, mouseDistance2);
uv = rotateFrom(uv, vec2(0.5), time * 0.1);
//animate y
//wave
uv.y /= ratio;
vec2 basedUV = uv + vec2(1.0);
float complexityX = 10.0;
float complexityY = 10.0;
float maxAmp = mix(0.05, 0.75, mouseDistance);
float amp = 0.01 * mouseDistance + noise(vec3(basedUV.x * complexityX, basedUV.y * complexityY, time * 0.1)) * maxAmp;
float theta = time + mouseDistance + basedUV.y * (TWO_PI);
uv.x = fract(uv.x + sin(theta) * amp);
//divide into cols rows
vec2 nuv = uv * colsrows;
vec2 fuv = fract(nuv);
vec2 iuv = floor(nuv);
float minSpeed = 1.0;
float maxSpeed = 5.0;
float speed = minSpeed + random(floor(uv.x * colsrows.x)) * (maxSpeed - minSpeed);
fuv.y = fract(fuv.y + time * speed);
//draw dash line
float minWeight = 0.005 + random(vec2(iuv.x, 0.0)) * 0.05;
float strokeWeight = mix(minWeight, minWeight * 5.0, mouseDistance);
float dlineWidth = mix(1.0, 0.25 - strokeWeight, mouseDistance);//0.5 - strokeWeight;
float dline = drawLine(fuv, vec2(0.5, 0.5 - dlineWidth * 0.5), vec2(0.5, 0.5 + dlineWidth * 0.5), strokeWeight);
float randIndexHue = random(vec2(iuv.x + floor(time), 0.0));
float noiseHue = noise(vec3(randIndexHue, randIndexHue, time));
float hue = mix(0.111, 0.138, randIndexHue + (noiseHue * 0.5));
vec4 grain = addGrain(ouv, time, 0.15);
//vec3 color = HSLToRGB(vec3(hue, 1.0, 0.5));
//vec3 bgColor = HSLToRGB(vec3(0.772, mix(0.75, 1.0, mouseDistance), mix(0.1, 0.25, mouseDistance)));
vec3 color = vec3(1.0);
vec3 bgColor = vec3(0.0);
float val = dline * (mouseDistance * 0.5 + 0.5);
vec3 albedo = mix(bgColor, color, val);
gl_FragColor = vec4(albedo, 1.0) + grain;
}
</script>
The thickness of the lines is computed in drawLine and depends on the parameters to smoothstep:
float drawLine(vec2 uv, vec2 p1, vec2 p2, float r)
{
// [...]
return 1.0 - smoothstep(0.0, 0.01, d - r);
}
Increase the parameter to edge1, to generate "thicker" lines (e.g. 0.1):
float drawLine(vec2 uv, vec2 p1, vec2 p2, float r)
{
// [...]
return 1.0 - smoothstep(0.0, 0.1, d - r);
}
You can add an additional uniform variable for the line:
uniform float thickness;
float drawLine(vec2 uv, vec2 p1, vec2 p2, float r)
{
// [...]
return 1.0 - smoothstep(0.0, thickness, d - r);
}
uniforms = {
// [...]
thickness: { type: "f", value: 0.1 },
};
how can I add a picture instead of blue, or so that the blue color is transparent?
Here's a code snippet:
/*
Most of the stuff in here is just bootstrapping. Essentially it's just
setting ThreeJS up so that it renders a flat surface upon which to draw
the shader. The only thing to see here really is the uniforms sent to
the shader. Apart from that all of the magic happens in the HTML view
under the fragment shader.
*/
let container;
let camera, scene, renderer;
let uniforms;
function init() {
container = document.getElementById( 'container' );
camera = new THREE.Camera();
camera.position.z = 1;
scene = new THREE.Scene();
var geometry = new THREE.PlaneBufferGeometry( 2, 2 );
uniforms = {
u_time: { type: "f", value: Math.random() * 1000.0 },
u_resolution: { type: "v2", value: new THREE.Vector2() },
u_mouse: { type: "v2", value: new THREE.Vector2() }
};
var material = new THREE.ShaderMaterial( {
uniforms: uniforms,
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent
} );
var mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
container.appendChild( renderer.domElement );
onWindowResize();
window.addEventListener( 'resize', onWindowResize, false );
document.onmousemove = function(e){
uniforms.u_mouse.value.x = e.pageX
uniforms.u_mouse.value.y = e.pageY
}
}
function onWindowResize( event ) {
renderer.setSize( window.innerWidth, window.innerHeight );
uniforms.u_resolution.value.x = renderer.domElement.width;
uniforms.u_resolution.value.y = renderer.domElement.height;
}
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
uniforms.u_time.value += 0.05;
renderer.render( scene, camera );
}
init();
animate();
html, body {
height: 100%;
margin: 0;
}
body {
overflow: auto;
}
.to-change-container {
height: 300px;
width:300px;
display: flex;
justify-content: center;
align-items: center;
}
.height-stratcher {
height: 1000px;
}
.to-change {
max-height: 100%;
max-width: 100%;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/88/three.min.js"></script>
<script id="vertexShader" type="x-shader/x-vertex">
void main() {
gl_Position = vec4( position, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
uniform vec2 u_resolution;
uniform float u_time;
const float NOISE_SIZE = 3.; // The size of the noise. Essentially the multiplier for the noise UV. Smaller = bigger
const float INTENSITY = 20.; // The intensity of the displacement
const float REFLECTION_INTENSITY = 4.; // The intensity of the rellowish reflections.
const int octaves = 2; // the number of octaves to generate in the FBM noise
const float seed = 43758.5453123; // A random seed :)
/*
Underwater Sun
Liam Egan - 2018
----------------------
A basic rippling pattern distorted by a very light amount of FBM noise.
Many many thanks to Inigo Quilez, Patricio Gonzalez Vivo,
Gary Warne, and many many others.
"Nanos gigantum humeris insidentes"
*/
vec2 random2(vec2 st, float seed){
st = vec2( dot(st,vec2(127.1,311.7)),
dot(st,vec2(269.5,183.3)) );
return -1.0 + 2.0*fract(sin(st)*seed);
}
// Value Noise by Inigo Quilez - iq/2013
// https://www.shadertoy.com/view/lsf3WH
float noise(vec2 st, float seed) {
vec2 i = floor(st);
vec2 f = fract(st);
vec2 u = f*f*(3.0-2.0*f);
return mix( mix( dot( random2(i + vec2(0.0,0.0), seed ), f - vec2(0.0,0.0) ),
dot( random2(i + vec2(1.0,0.0), seed ), f - vec2(1.0,0.0) ), u.x),
mix( dot( random2(i + vec2(0.0,1.0), seed ), f - vec2(0.0,1.0) ),
dot( random2(i + vec2(1.0,1.0), seed ), f - vec2(1.0,1.0) ), u.x), u.y);
}
float fbm1(in vec2 _st, float seed) {
float v = 0.0;
float a = 0.5;
vec2 shift = vec2(100.0);
// Rotate to reduce axial bias
mat2 rot = mat2(cos(0.5), sin(0.5),
-sin(0.5), cos(0.50));
for (int i = 0; i < octaves; ++i) {
v += a * noise(_st, seed);
_st = rot * _st * 2.0 + shift;
a *= 0.4;
}
return v + .4;
}
float pattern(vec2 uv, float seed, float time, inout vec2 q, inout vec2 r) {
q = vec2( fbm1( uv + vec2(0.0,0.0), seed ),
fbm1( uv + vec2(5.2,1.3), seed ) );
r = vec2( fbm1( uv + 4.0*q + vec2(1.7 - time / 2.,9.2), seed ),
fbm1( uv + 4.0*q + vec2(8.3 - time / 2.,2.8), seed ) );
float rtn = fbm1( uv + 4.0*r, seed );
return rtn;
}
mat2 rotate(float angle) {
return mat2(cos(angle), -sin(angle), sin(angle), cos(angle));
}
void main() {
vec2 uv = (gl_FragCoord.xy - 0.5 * u_resolution.xy) / u_resolution.y;
// Generate our displacement map
vec2 _uv = uv * NOISE_SIZE;
vec2 q = vec2(0.);
vec2 r = vec2(0.);
float pattern = pattern(_uv, seed, u_time/2., q, r);
uv += (.5 - pattern) / INTENSITY; // modulate the main UV coordinates by the pattern
// uv -= .5 / INTENSITY; // This just recenters the UV coords after the distortion
float len = length(uv) + .01;
float field = len+0.05*(-1.0*u_time / 5.); // The distance field from the middle to the edge
float ripple;
ripple = sin(field*80.0) * .5 * r.x * pattern + 1.; // The ripple pattern
ripple += smoothstep(0.2,.0,len); // Adding a core gradient to the sun. Essentially this is just a smoothed version of the distance field
ripple *= smoothstep(0.3,.9,clamp(1. - len * 1.5,0.0,1.0)); // Vignette the sun, making it smaller than infinity
ripple -= fract(ripple * 8.) / 100.; // Adds a nice sort of reflective element
vec3 colour = vec3(.2, .3, .4); // the basic colour
colour += ripple * length(r) * vec3(1., 1., .8); // ripple times sun colour
colour += (1. - pattern * REFLECTION_INTENSITY * .5) * smoothstep(0.0,.7,clamp(1. - len * 1.5,0.0,1.0)) * vec3(-.2, -.1, 0.); // vignette and reflection
colour += (1. - pattern * REFLECTION_INTENSITY * 2.) * smoothstep(0.5,.9,clamp(1. - len * 1.5,0.0,1.0)) * vec3(-.2, -.1, 0.); // vignette and reflection 2 - this is essentially a more intense, but reduced radius version of the previous one.
gl_FragColor = vec4(colour, 1.);
}
</script>
<div id="container"></div>
Please actively try to answer your own question first. If you were to read through the code, you would have seen this line here (105):
Put your RGB values into the vec3() function and the colour will change.
In the image above I've changed the values to Red (1) Green (0) and Blue (0), which results in a red colour rather than the default blue.
Hope this helps.
I am lately experimenting and learning webgl and shaders, as i am trying to build animation for the website background. I was able to port simple examples from shadertoy into three.js to play with it. Now i am trying to better understand more advanced examples and i am struggling with this particlar example:
https://www.shadertoy.com/view/4sfBWj
I am aware that to port shader program to three.js you need to:
create three.js basic setup with new THREE.PlaneGeometry()
create uniforms for iTime and iResolution
create vertex and fragment shaders script tags
populate fragment shader with shadertoy contents (image section)
populate vertex shader with generic script
change names to gl_FragColor and gl_FragCoord
change name of the function to void main(void)
if there is some texture used in one or more channels then
load texture with new THREE.TextureLoader() and create uniform for iChannel0
Basic examples will be good to go with above. However the one i linked has:
Buffer A
Buffer B
and both of these also include shader programs and main functions that runs them, how do i deal with it to be able to port it to three.js?
my current progress:
var container;
var camera, scene0, scene1, scene2, renderer;
var uniforms0, uniforms1, uniforms2;
var startTime;
var renderTarget0, renderTarget1;
var clock = new THREE.Clock();
init();
animate();
function init() {
container = document.getElementById( 'container' );
startTime = Date.now();
camera = new THREE.Camera();
camera.position.z = 1;
scene0 = new THREE.Scene();
scene1 = new THREE.Scene();
scene2 = new THREE.Scene();
renderTarget0 = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight);
renderTarget1 = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight);
/* scene0 */
var geometry0 = new THREE.PlaneGeometry(700, 394, 1, 1);
uniforms0 = {
iTime: { type: "f", value: 1.0 },
iResolution: { type: "v1", value: new THREE.Vector2(), }
};
var material0 = new THREE.ShaderMaterial( {
uniforms: uniforms0,
vertexShader: document.getElementById( 'vs0' ).textContent,
fragmentShader: document.getElementById( 'fs0' ).textContent
});
/* scene0 */
var mesh0 = new THREE.Mesh( geometry0, material0 );
/* scene1 */
var geometry1 = new THREE.PlaneGeometry(700, 394, 1, 1);
uniforms1 = {
iTime: { type: "f", value: 1.0 },
iResolution: { type: "v1", value: new THREE.Vector2(), }
};
var material1 = new THREE.ShaderMaterial( {
uniforms: uniforms1,
vertexShader: document.getElementById( 'vs1' ).textContent,
fragmentShader: document.getElementById( 'fs1' ).textContent,
iChannel0: {type: 't', value: renderTarget0 }
});
var mesh1 = new THREE.Mesh( geometry1, material1 );
/* scene1 */
/* scene2 */
var geometry2 = new THREE.PlaneGeometry(700, 394, 1, 1);
uniforms2 = {
iTime: { type: "f", value: 1.0 },
iResolution: { type: "v1", value: new THREE.Vector2(), }
};
var material2 = new THREE.ShaderMaterial( {
uniforms: uniforms1,
vertexShader: document.getElementById( 'vs2' ).textContent,
fragmentShader: document.getElementById( 'fs2' ).textContent,
iChannel0: {type: 't', value: renderTarget0 },
iChannel1: {type: 't', value: renderTarget1 }
});
var mesh2 = new THREE.Mesh( geometry2, material2 );
/* scene2 */
scene0.add( mesh0 );
scene1.add( mesh1 );
scene2.add( mesh2 );
renderer = new THREE.WebGLRenderer();
container.appendChild( renderer.domElement );
onWindowResize();
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize( event ) {
uniforms0.iResolution.value.x = window.innerWidth;
uniforms0.iResolution.value.y = window.innerHeight;
uniforms1.iResolution.value.x = window.innerWidth;
uniforms1.iResolution.value.y = window.innerHeight;
uniforms2.iResolution.value.x = window.innerWidth;
uniforms2.iResolution.value.y = window.innerHeight;
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
//renderer.render(scene0, camera, renderTarget0);
//renderer.render(scene1, camera, renderTarget1);
uniforms1.iChannel0.value = rendertarget0.texture;
uniforms2.iChannel0.value = rendertarget0.texture;
uniforms2.iChannel1.value = rendertarget1.texture;
uniforms0.iTime.value += clock.getDelta();
uniforms1.iTime.value += clock.getDelta();
uniforms2.iTime.value += clock.getDelta();
//renderer.render( scene2, camera );
}
body {
margin:0;
padding:0;
overflow:hidden;
}
<script src="//threejs.org/build/three.min.js"></script>
<div id="container"></div>
<!-- BREAK --->
<script id="vs0" type="x-shader/x-vertex">
void main() {
vec4 mvPosition = modelViewMatrix * vec4(position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script id="fs0" type="x-shader/x-fragment">
uniform vec2 iResolution;
uniform float iTime;
const mat2 m = mat2( 0.8, 0.6, -0.6, 0.8 );
const mat3 m3 = mat3( 0.8, 0.6, 0.0, -0.6, 0.80, 0.0, 0.0, 0.0, 1.0) *
mat3( 1.0, 0.0, 0.0, 0.0, -0.60, 0.80, 0.0, 0.8, 0.6) *
mat3( 0.8, 0.6, 0.0, -0.6, 0.80, 0.0, 0.0, 0.0, 1.0) *
mat3( 1.0, 0.0, 0.0, 0.0, -0.60, 0.80, 0.0, 0.8, 0.6);
float time;
float n1f0(float p) {
return fract(sin(p * 1.7227636) * 8.03e2);
}
float n1f1(float p) {
return fract(sin(p * 1.42736 + 1.12) * 5.1e2);
}
float n1f2(float p) {
return fract(sin(p * 1.22712 + 12.161) * 5.2e2);
}
float n3f(vec3 p) {
return fract(n1f0(p.x) + n1f1(p.y) + n1f2(p.z) + n1f0(p.x * 1.613) + n1f1(p.y * 3.112) + n1f2(p.z * 4.112));
}
float n3(vec3 p) {
vec3 b = floor(p);
vec3 e = b + vec3(1.0);
vec3 f = smoothstep(vec3(0.0), vec3(1.0), fract(p));
float c000 = n3f(b);
float c001 = n3f(vec3(b.x, b.y, e.z));
float c010 = n3f(vec3(b.x, e.y, b.z));
float c011 = n3f(vec3(b.x, e.y, e.z));
float c100 = n3f(vec3(e.x, b.y, b.z));
float c101 = n3f(vec3(e.x, b.y, e.z));
float c110 = n3f(vec3(e.x, e.y, b.z));
float c111 = n3f(e);
vec4 z = mix(vec4(c000, c100, c010, c110), vec4(c001, c101, c011, c111), f.z);
vec2 yz = mix(z.xy, z.zw, f.y);
return mix(yz.x, yz.y, f.x);
}
float fbm4( vec3 p )
{
float f = 0.0;
p = m3 * p;
f += 0.5000*n3( p ); p = m3*p*2.02;
f += 0.2500*n3( p ); p = m3*p*2.03;
f += 0.1250*n3( p ); p = m3*p*2.01;
f += 0.0625*n3( p );
return f/0.9375;
}
float fbm4( vec2 p )
{
return fbm4(vec3(p, time));
}
float fbm6( vec3 p )
{
float f = 0.0;
p = m3 * p;
f += 0.500000*n3( p ); p = m3*p*2.02;
f += 0.250000*n3( p ); p = m3*p*2.03;
f += 0.125000*n3( p ); p = m3*p*2.01;
f += 0.062500*n3( p ); p = m3*p*2.04;
f += 0.031250*n3( p ); p = m3*p*2.01;
f += 0.015625*n3( p );
return f/0.984375;
}
float fbm6( vec2 p )
{
return fbm6(vec3(p, time));
}
float grid(vec2 p) {
p = sin(p * 3.1415);
return smoothstep(-0.01, 0.01, p.x * p.y);
}
void main(void) {
time = iTime * 0.7;
vec2 q = gl_FragCoord.xy / iResolution.xy;
vec2 p = -1.0 + 2.0 * q;
p.x *= iResolution.x/iResolution.y;
p.y *= 0.3;
p.y -= time * 1.5;
float tc = time * 1.2;
float tw1 = time * 2.5;
float tw2 = time * 0.6;
vec3 vw1 = vec3(p, tw1);
vw1.y *= 2.8;
vec2 ofs1 = vec2(fbm4(vw1), fbm4(vw1 + vec3(10.0, 20.0, 50.0)));
ofs1.y *= 0.3;
ofs1.x *= 1.3;
vec3 vw2 = vec3(p, tw2);
vw2.y *= 0.8;
vec2 ofs2 = vec2(fbm4(vw2), fbm4(vw2 + vec3(10.0, 20.0, 50.0)));
ofs2.y *= 0.3;
ofs2.x *= 1.3;
vec2 vs = (p + ofs1 * 0.5 + ofs2 * 0.9) * 4.0;
vec3 vc = vec3(vs, tc);
float l;
l = fbm6(vc);
l = smoothstep(0.0, 1.0, l);
l = max(0.0, (l - pow(q.y * 0.8, 0.6)) * 1.8);
float r = pow(l , 1.5);
float g = pow(l , 3.0);
float b = pow(l , 6.0);
//r = grid(vs);
gl_FragColor = vec4( r, g, b, 1.0 );
}
</script>
<!-- BREAK --->
<script id="vs1" type="x-shader/x-vertex">
void main() {
vec4 mvPosition = modelViewMatrix * vec4(position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script id="fs1" type="x-shader/x-fragment">
uniform vec2 iResolution;
uniform float iTime;
uniform sampler2D iChannel0;
#ifdef GL_ES
precision mediump float;
#endif
#define SIGMA 5.0
float normpdf(in float x, in float sigma)
{
return 0.39894*exp(-0.5*x*x/(sigma*sigma))/sigma;
}
void main(void) {
vec3 c = texture2D(iChannel0, gl_FragCoord.xy / iResolution.xy).rgb;
//declare stuff
const int mSize = int(SIGMA * 11.0/7.0);
const int kSize = (mSize-1)/2;
float kernel[mSize];
vec3 finalColor = vec3(0.0);
//create the 1-D kernel
float sigma = SIGMA;
float Z = 0.0;
for (int j = 0; j <= kSize; ++j)
{
kernel[kSize+j] = kernel[kSize-j] = normpdf(float(j), sigma);
}
//get the normalization factor (as the gaussian has been clamped)
for (int j = 0; j < mSize; ++j)
{
Z += kernel[j];
}
//read out the texels
for (int i=-kSize; i <= kSize; ++i)
{
for (int j=-kSize; j <= kSize; ++j)
{
finalColor += kernel[kSize+j]*kernel[kSize+i]*texture2D(iChannel0, (gl_FragCoord.xy+vec2(float(i),float(j))) / iResolution.xy).rgb;
}
}
finalColor /= Z*Z;
//finalColor = c + finalColor * 0.3;
gl_FragColor = vec4(finalColor, 1.0);
}
</script>
<!-- BREAK --->
<script id="vs2" type="x-shader/x-vertex">
void main() {
vec4 mvPosition = modelViewMatrix * vec4(position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script id="fs2" type="x-shader/x-fragment">
uniform vec2 iResolution;
uniform float iTime;
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
#ifdef GL_ES
precision mediump float;
#endif
#define SIGMA 5.0
float normpdf(in float x, in float sigma)
{
return 0.39894*exp(-0.5*x*x/(sigma*sigma))/sigma;
}
void main(void) {
vec3 c = texture2D(iChannel0, gl_FragCoord.xy / iResolution.xy).rgb;
//gl_FragColor = vec4(c, 1.0);
//return;
//declare stuff
const int mSize = int(SIGMA * 11.0/7.0);
const int kSize = (mSize-1)/2;
float kernel[mSize];
vec3 finalColor = vec3(0.0);
//create the 1-D kernel
float sigma = SIGMA;
float Z = 0.0;
for (int j = 0; j <= kSize; ++j)
{
kernel[kSize+j] = kernel[kSize-j] = normpdf(float(j), sigma);
}
//get the normalization factor (as the gaussian has been clamped)
for (int j = 0; j < mSize; ++j)
{
Z += kernel[j];
}
//read out the texels
for (int i=-kSize; i <= kSize; ++i)
{
for (int j=-kSize; j <= kSize; ++j)
{
finalColor += kernel[kSize+j]*kernel[kSize+i]*texture2D(iChannel1, (gl_FragCoord.xy+vec2(float(i),float(j))) / iResolution.xy).rgb;
}
}
finalColor /= Z*Z;
finalColor = c + pow(finalColor, vec3(0.5)) * 0.5;
gl_FragColor = vec4(finalColor, 1.0);
}
</script>
This example uses multiple renders per frame. It works like this:
render shaderA to buffer
pass output to shaderB
render shaderB to buffer
pass output to shaderC
render shaderC to canvas
To replicate this in Three.js, you'll need a WebGLRenderTarget as intermediary to pass the output from one render as a texture to the next shader. Here's the pseudocode with only 2 renders, you'll need to extend it if you need more:
var renderer = new WebGLRenderer(w, h, ...);
var scene0 = new Scene();
var scene1 = new Scene();
var plane0 = new THREE.PlaneBufferGeometry(1, 1);
var plane1 = new THREE.PlaneBufferGeometry(1, 1);
// ... continue building materials, shaders, etc
// Add plane mesh to its corresponding scene
scene0.add(planeMesh0);
scene1.add(planeMesh1);
// You should only need one camera if they're both in the same position.
var cam = new Camera();
// renderTarget will store the first buffer
var renderTarget = new WebGLRenderTarget(w, h);
update() {
// This pass will render the first shader
// Its output will be drawn on the rendertarget's texture
renderer.render(scene0, cam, renderTarget);
// We assign the output of the first render pass to the second plane
plane1.uniforms.texture.value = rendertarget.texture;
// Now we render the second shader to the canvas
renderer.render(scene1, cam);
}
Keep in mind you have to render separate scenes in each pass to avoid recursion issues, so you'll have to add each plane to a separate scene. To learn more about WebGLRenderTarget you can read about it in the docs
I tried to create the snow effect like the one on the bottom page of this link http://blog.edankwan.com/post/my-first-christmas-experiment. Everything else works fine But just can't make the motion blur effect work. Any ideas?
the texture sprite used to achieve the motion blur effect
here is the code:
(function(global) {
var img = 'https://i.imgur.com/hlmsgWA.png'
var renderer, scene, camera
var w = 800, h = 320
var uniforms
var geometry
var texture, material
var gui
var conf = {
amount: 200,
speed: 0.5,
time: 0
}
var obj = {
init: function() {
renderer = new THREE.WebGLRenderer({
antialias: true
})
renderer.setPixelRatio(window.devicePixelRatio)
renderer.setSize(w, h)
camera = new THREE.Camera
scene = new THREE.Scene()
geometry = new THREE.BufferGeometry()
var positions = []
for(var i = 0, l = conf.amount; i < l; i++) {
positions[i * 3] = Math.random() * 800 - 400
positions[i * 3 + 1] = i
positions[i * 3 + 2] = Math.random() * 800
}
geometry.addAttribute('position', new THREE.Float32BufferAttribute(positions, 3))
var vs = document.getElementById('vertexShader').textContent
var fs = document.getElementById('fragmentShader').textContent
uniforms = {
u_amount: {
type: 'f',
value: conf.amount
},
u_speed: {
type: 'f',
value: conf.speed
},
u_time: {
type: 'f',
value: conf.time
},
u_resolution: {
type: 'vec2',
value: new THREE.Vector2(w, h)
},
u_texture : {
value: new THREE.TextureLoader().load(img)
}
}
material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: vs,
fragmentShader: fs,
transparent: true
})
var points = new THREE.Points(geometry, material)
scene.add(points)
document.body.appendChild(renderer.domElement)
this.render()
this.createGui()
},
createGui: function() {
gui = new dat.GUI()
gui.add(conf, 'speed', -1, 1)
},
render: function() {
requestAnimationFrame(this.render.bind(this))
uniforms.u_time.value += conf.speed * 0.003
renderer.render(scene, camera)
}
}
obj.init()
})(window)
<script id="vertexShader" type="x-shader/x-vertex">
precision highp float;
vec3 getPosOffset(float ratio, float thershold) {
return vec3(
cos((ratio * 80.0 + 10.0) * thershold) * 20.0 * thershold,
(sin((ratio * 90.0 + 30.0) * thershold) + 1.0) * 5.0 * thershold + mix(500.0, -500.0, ratio / thershold),
sin((ratio * 70.0 + 20.0) * thershold) * 20.0 * thershold
);
}
uniform vec2 u_resolution;
uniform float u_amount;
uniform float u_speed;
uniform float u_time;
varying float v_alpha;
varying float v_rotation;
varying float v_index;
void main() {
float indexRatio = position.y / u_amount;
float thershold = 0.7 + indexRatio * 0.3;
float ratio = mod(u_time - indexRatio * 3.0, thershold);
float prevRatio = mod(u_time - u_speed - indexRatio * 3.0, thershold);
vec3 offsetPos = getPosOffset(ratio, thershold);
vec3 prevOffsetPos = getPosOffset(prevRatio, thershold);
vec3 pos = position;
pos += offsetPos;
float perspective = (2000.0 - pos.z) / 2000.0;
pos.x *= perspective;
pos.y *= perspective;
float delta = length(offsetPos.xy - prevOffsetPos.xy);
float maxDelta = 2.7;
v_index = floor(pow(clamp(delta, 0.0, maxDelta) / maxDelta, 5.0) * 15.99);
v_rotation = atan((offsetPos.x - prevOffsetPos.x) / (offsetPos.y - prevOffsetPos.y));
pos.x *= 2.0 / u_resolution.x;
pos.y *= 2.0 / u_resolution.y;
pos.z = 0.0;
gl_Position = projectionMatrix * modelViewMatrix * vec4(pos, 1.0);
gl_PointSize = 24.0 * perspective;
v_alpha = perspective;
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
uniform sampler2D u_texture;
varying float v_rotation;
varying float v_index;
varying float v_alpha;
void main() {
vec2 coord = gl_PointCoord.xy;
coord = vec2(
clamp(cos(v_rotation) * (coord.x - 0.5) + sin(v_rotation) * (coord.y - 0.5) + 0.5, 0.0, 1.0),
clamp(cos(v_rotation) * (coord.y - 0.5) - sin(v_rotation) * (coord.x - 0.5) + 0.5, 0.0, 1.0)
);
float index = floor(v_index + 0.5);
coord.y = - coord.y + 4.0;
coord.x += (index - floor(index / 4.0) * 4.0);
coord.y -= floor(index / 4.0);
coord *= 0.25;
vec4 color = texture2D(u_texture, coord);
color.a *= v_alpha;
gl_FragColor = color;
}
</script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/99/three.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/dat-gui/0.7.3/dat.gui.js"></script>
http://blog.edankwan.com/post/my-first-christmas-experiment.
In snippet code you treat variable speed differently in your update loop and in vertex shader.
Update: uniforms.u_time.value += conf.speed * 0.003
Shader usage: float prevRatio = mod(u_time - u_speed - indexRatio * 3.0, thershold);
You'll get desired result if you change u_speed to u_speed * 0.003 in shader code (or better move that multiplication to definition in javascript).
Fast way to debug such things - pass your values to output color and check if it is what you expect.
==================
If someone wants to make tails with non-flat curvature - you can store last N points of each particle path. Then you can fully compute trail mesh geometry on CPU and stream it to GPU.
Or another way: you can upload all paths to single Uniform Buffer Object and find right point to fetch with pointId and uv of tail mesh vertex.
I am trying to reproduce the three.js panaorama dualfisheye example using Three.js r71.
I need to stick to r71 because eventually I will use this code on autodesk forge viewer which is based on Three.js r71.
I made some progress, but I am facing an error message in the browser javascript console saying: RENDER WARNING: there is no texture bound to the unit 0
var camera, scene, renderer;
var isUserInteracting = false,
onMouseDownMouseX = 0, onMouseDownMouseY = 0,
lon = 0, onMouseDownLon = 0,
lat = 0, onMouseDownLat = 0,
phi = 0, theta = 0,
distance = 500;
init();
animate();
function init() {
var container, mesh;
container = document.getElementById('container');
camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 2000);
scene = new THREE.Scene();
// var geometry = new THREE.SphereBufferGeometry( 500, 60, 40 ).toNonIndexed();
var geometry = new THREE.SphereGeometry(500, 60, 40);
// invert the geometry on the x-axis so that all of the faces point inward
// geometry.scale( - 1, 1, 1 );
geometry.applyMatrix(new THREE.Matrix4().makeScale(-1, 1, 1));
// Remap UVs
// var normals = geometry.attributes.normal.array;
var normals = [];
geometry.faces.forEach(element => {
normals.push(element.normal)
});
var uvs = geometry.faceVertexUvs
// var uvs = geometry.attributes.uv.array;
for (var i = 0, l = normals.length / 3; i < l; i++) {
var x = normals[i * 3 + 0];
var y = normals[i * 3 + 1];
var z = normals[i * 3 + 2];
if (i < l / 2) {
var correction = (x == 0 && z == 0) ? 1 : (Math.acos(y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
uvs[i * 2 + 0] = x * (404 / 1920) * correction + (447 / 1920);
uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
} else {
var correction = (x == 0 && z == 0) ? 1 : (Math.acos(- y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
uvs[i * 2 + 0] = - x * (404 / 1920) * correction + (1460 / 1920);
uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
}
}
geometry.applyMatrix(new THREE.Matrix4().makeRotationZ(-Math.PI / 2))
// geometry.rotateZ( - Math.PI / 2 );
//
// var texture = new THREE.TextureLoader().load( 'ricoh_theta_s.jpg' );
var texture = new THREE.TextureLoader('https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg');
this.texture = texture;
texture.format = THREE.RGBFormat;
var material = new THREE.MeshBasicMaterial({ map: texture });
material.map.repeat = { x: 0, y: 0 }
material.map.offset = { x: 0, y: 0 };
mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
container.appendChild(renderer.domElement);
document.addEventListener('mousedown', onDocumentMouseDown, false);
document.addEventListener('mousemove', onDocumentMouseMove, false);
document.addEventListener('mouseup', onDocumentMouseUp, false);
document.addEventListener('wheel', onDocumentMouseWheel, false);
//
window.addEventListener('resize', onWindowResize, false);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
function onDocumentMouseDown(event) {
event.preventDefault();
isUserInteracting = true;
onPointerDownPointerX = event.clientX;
onPointerDownPointerY = event.clientY;
onPointerDownLon = lon;
onPointerDownLat = lat;
}
function onDocumentMouseMove(event) {
if (isUserInteracting === true) {
lon = (onPointerDownPointerX - event.clientX) * 0.1 + onPointerDownLon;
lat = (onPointerDownPointerY - event.clientY) * 0.1 + onPointerDownLat;
}
}
function onDocumentMouseUp(event) {
isUserInteracting = false;
}
function onDocumentMouseWheel(event) {
distance += event.deltaY * 0.05;
distance = THREE.Math.clamp(distance, 400, 1000);
}
function animate() {
// requestAnimationFrame(animate);
update();
}
function update() {
if (isUserInteracting === false) {
lon += 0.1;
}
lat = Math.max(- 85, Math.min(85, lat));
phi = THREE.Math.degToRad(90 - lat);
theta = THREE.Math.degToRad(lon - 180);
camera.position.x = distance * Math.sin(phi) * Math.cos(theta);
camera.position.y = distance * Math.cos(phi);
camera.position.z = distance * Math.sin(phi) * Math.sin(theta);
camera.lookAt(scene.position);
renderer.render(scene, camera);
}
body {
background-color: #000000;
margin: 0px;
overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/71/three.js"></script>
<div id="container"></div>
Thank you for your time.
There are a bunch of issues with the code
The loading code is wrong for r71. It should be something like this
THREE.ImageUtils.crossOrigin = '';
var texture = THREE.ImageUtils.loadTexture('https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg');
IIRC THREE r71 didn't pre-init textures with something renderable so you need to wait for the texture to load
var texture = THREE.ImageUtils.loadTexture(
'https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg',
undefined,
animate); // call animate after texture has loaded
and removed the call to animate at the top
That will get rid of the warning but continuing on
The code sets the repeat to 0
material.map.repeat = { x: 0, y: 0 };
material.map.offset = { x: 0, y: 0 };
Setting the repeat to 0 means you'll only see the first pixel of the texture since all UVs will be multiplied by 0
It code sets the repeat and offset incorrectly.
The correct way to set repeat and offset is with set
material.map.repeat.set(1, 1);
material.map.offset.set(0, 0);
It works the other way but only by luck. The 2 settings are THREE.Vector2
objects. The code using repeat and offset could change at any time to
use methods on THREE.Vector2 or pass the repeat and offset to functions
that expect a THREE.Vector2 so it's best not to replace them
note that there is no reason to set them though. 1 1 for repeat and 0 0 for offset are the defaults.
The code only renders once
requestAnimationFrame was commented out. Textures load asynchronously
so you won't see the texture for several frames. You either need to wait
for the texture to load before rendering, render again once it has finish
loading or, render continuously so that when it happens to load it's used
The code is using a cross domain image
This isn't actually a bug, just a warning. WebGL can't use cross domain
images unless the server itself gives permission. The one the code links
to does give that permission but I wasn't sure if you're aware of that
or were just lucky. The majority of images from servers not your own
are unlikely to work.
The code's uv math is wrong
You should ask another question for that. Commenting that out I can see the texture
var camera, scene, renderer;
var isUserInteracting = false,
onMouseDownMouseX = 0, onMouseDownMouseY = 0,
lon = 0, onMouseDownLon = 0,
lat = 0, onMouseDownLat = 0,
phi = 0, theta = 0,
distance = 500;
init();
function init() {
var container, mesh;
container = document.getElementById('container');
camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 2000);
scene = new THREE.Scene();
// var geometry = new THREE.SphereBufferGeometry( 500, 60, 40 ).toNonIndexed();
var geometry = new THREE.SphereGeometry(500, 60, 40);
// invert the geometry on the x-axis so that all of the faces point inward
// geometry.scale( - 1, 1, 1 );
geometry.applyMatrix(new THREE.Matrix4().makeScale(-1, 1, 1));
// Remap UVs
// var normals = geometry.attributes.normal.array;
var normals = [];
geometry.faces.forEach(element => {
normals.push(element.normal)
});
var uvs = geometry.faceVertexUvs
// var uvs = geometry.attributes.uv.array;
for (var i = 0, l = normals.length / 3; i < l; i++) {
var x = normals[i * 3 + 0];
var y = normals[i * 3 + 1];
var z = normals[i * 3 + 2];
if (i < l / 2) {
var correction = (x == 0 && z == 0) ? 1 : (Math.acos(y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
// uvs[i * 2 + 0] = x * (404 / 1920) * correction + (447 / 1920);
// uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
} else {
var correction = (x == 0 && z == 0) ? 1 : (Math.acos(- y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
// uvs[i * 2 + 0] = - x * (404 / 1920) * correction + (1460 / 1920);
// uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
}
}
geometry.applyMatrix(new THREE.Matrix4().makeRotationZ(-Math.PI / 2))
// geometry.rotateZ( - Math.PI / 2 );
//
THREE.ImageUtils.crossOrigin = '';
var texture = THREE.ImageUtils.loadTexture('https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg', undefined, animate);
var material = new THREE.MeshBasicMaterial({ map: texture });
material.map.repeat.set(1, 1);
material.map.offset.set(0, 0);
mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
container.appendChild(renderer.domElement);
document.addEventListener('mousedown', onDocumentMouseDown, false);
document.addEventListener('mousemove', onDocumentMouseMove, false);
document.addEventListener('mouseup', onDocumentMouseUp, false);
document.addEventListener('wheel', onDocumentMouseWheel, false);
//
window.addEventListener('resize', onWindowResize, false);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
function onDocumentMouseDown(event) {
event.preventDefault();
isUserInteracting = true;
onPointerDownPointerX = event.clientX;
onPointerDownPointerY = event.clientY;
onPointerDownLon = lon;
onPointerDownLat = lat;
}
function onDocumentMouseMove(event) {
if (isUserInteracting === true) {
lon = (onPointerDownPointerX - event.clientX) * 0.1 + onPointerDownLon;
lat = (onPointerDownPointerY - event.clientY) * 0.1 + onPointerDownLat;
}
}
function onDocumentMouseUp(event) {
isUserInteracting = false;
}
function onDocumentMouseWheel(event) {
distance += event.deltaY * 0.05;
distance = THREE.Math.clamp(distance, 400, 1000);
}
function animate() {
requestAnimationFrame(animate);
update();
}
function update() {
if (isUserInteracting === false) {
lon += 0.1;
}
lat = Math.max(- 85, Math.min(85, lat));
phi = THREE.Math.degToRad(90 - lat);
theta = THREE.Math.degToRad(lon - 180);
camera.position.x = distance * Math.sin(phi) * Math.cos(theta);
camera.position.y = distance * Math.cos(phi);
camera.position.z = distance * Math.sin(phi) * Math.sin(theta);
camera.lookAt(scene.position);
renderer.render(scene, camera);
}
body {
background-color: #000000;
margin: 0px;
overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/71/three.js"></script>
<div id="container"></div>
For those just looking for an answer to the warning
RENDER WARNING: there is no texture bound to the unit 0
It is issued by Chrome when:
The bound shader program expects a texture.
No texture is bound.
Source and further links:
https://github.com/NASAWorldWind/WebWorldWind/issues/302#issuecomment-346188472
The fix then, is to always bind a texture to the shader samplers, even when the shader won't use it.
As gman suggested in his longer answer, binding a white 1px texture when "there is no texture" is good practice because it won't consume much space or bandwidth and the shader code can use it to multiply with another colour without altering it.