Setting an SVG fill using JavaScript

I am trying to change the color of an SVG image using audio from the microphone.
I have the logic written, and the fill attribute is updated on the SVG element, but nothing changes visually.
window.onload = function () {
  // (navigator.getUserMedia is deprecated in favour of navigator.mediaDevices.getUserMedia)
  if (navigator.getUserMedia) {
    navigator.getUserMedia(
      { audio: true },
      (stream) => {
        var context = new AudioContext();
        var src = context.createMediaStreamSource(stream);
        var analyser = context.createAnalyser();
        var svg = document.querySelector("#rangoli");
        src.connect(analyser);
        analyser.connect(context.destination);
        analyser.fftSize = 256;
        var bufferLength = analyser.frequencyBinCount;
        var dataArray = new Uint8Array(bufferLength);
        function renderFrame() {
          requestAnimationFrame(renderFrame);
          analyser.getByteFrequencyData(dataArray);
          for (var i = 0; i < bufferLength; i++) {
            var barHeight = dataArray[i] / 2;
            var r = barHeight + 25 * (i / bufferLength);
            var g = 250 * (i / bufferLength);
            var b = 50;
            svg.setAttribute("fill", `rgb(${r}, ${g}, ${b})`);
          }
        }
        renderFrame();
      },
      () => console.log("Error")
    );
  }
};
Here is a CodePen demo showing the SVG whose fill is being updated:
https://codepen.io/GR8z/pen/qBjopZG
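One likely cause, offered as a hedged note rather than a confirmed answer: a fill set on the root <svg> element is only inherited by shapes that don't declare their own fill, so if the paths inside #rangoli carry fill attributes (or CSS fill rules), the root attribute has no visible effect. A minimal sketch that targets the shapes directly, assuming the drawing is made of <path>, <circle>, or <rect> elements:

// Sketch: set fill on the shapes themselves instead of the <svg> root.
// Adjust the selector to match the actual elements inside #rangoli.
function setSvgFill(color) {
  document
    .querySelectorAll("#rangoli path, #rangoli circle, #rangoli rect")
    .forEach((shape) => shape.setAttribute("fill", color));
}

setSvgFill("rgb(120, 200, 50)");

Note that a CSS fill rule still overrides the presentation attribute, so any fill set in a stylesheet would also need to be removed or updated.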

Related

AudioContext and animation do not work on iPhone

I have this little code snippet here:
var context = new (window.AudioContext || window.webkitAudioContext)();
// ...
var source = context.createMediaElementSource(audio);
var analyser = context.createAnalyser();
var canvas = document.getElementById("canvas");
var ctx = canvas.getContext("2d");
source.connect(analyser);
analyser.connect(context.destination);
analyser.fftSize = 256;
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
var WIDTH = canvas.width;
var HEIGHT = canvas.height + (canvas.height * 0.7);
var barWidth = (WIDTH / bufferLength) * 2.5;
var barHeight;
var x = 0;

function renderFrame() {
  x = 0;
  ctx.fillStyle = "#000";
  ctx.fillRect(0, 0, WIDTH, HEIGHT);
  analyser.getByteFrequencyData(dataArray);
  for (var i = 0; i < bufferLength; i++) {
    barHeight = dataArray[i];
    var r = barHeight + (25 * (i / bufferLength));
    var g = 250 * (i / bufferLength);
    var b = 50;
    ctx.fillStyle = "rgb(" + r + "," + g + "," + b + ")";
    ctx.fillRect(x, HEIGHT - barHeight, barWidth, barHeight);
    x += barWidth + 1;
  }
  requestAnimationFrame(renderFrame);
}
requestAnimationFrame(renderFrame);

audio.src = jQuery("source").attr("src");
audio.play();
Full code and demo here: https://jsfiddle.net/6gv39mk0/1/show
It basically shows a dynamic waveform while audio is played.
It works in desktop and Android browsers, but I can't get it to work on iPhone. Am I missing something, or is there something I must declare or do differently?
I've solved this issue with a transparent button on top of the play button; this presumably gives iPhone browsers the user interaction they require. After that the button is hidden (z-index: -1).
It's not the most elegant solution, but it works!
var button = document.createElement('button');
jQuery(button).attr('id', 'btn');
jQuery(canvas).before(button); // was `canva` in the original, presumably a typo
jQuery(button).on('click', function () {
  start();
  jQuery(button).remove();
});
Here is the fiddle: https://jsfiddle.net/L1trwz7o/
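A more direct variant of the same idea, sketched here as an aside (the #play id and the surrounding context/audio variables are assumptions, not taken from the fiddle): iOS suspends an AudioContext until a genuine user gesture, so resuming it inside a click/tap handler usually unblocks both the audio and the animation.

// Sketch: resume the suspended AudioContext from a real user gesture.
document.getElementById("play").addEventListener("click", function () {
  if (context.state === "suspended") {
    context.resume().then(function () {
      audio.play();
    });
  } else {
    audio.play();
  }
});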

Reactjs recorder-js download

I am trying to make a React app for recording voice samples and saving them as WAV files. I used recorder-js to make the recording, but I am still struggling with the part that saves the WAV file: I get exportWAV is not a function every time.
If you've been there, please help.
Sample code is here:
// create
var record_recorder;
record_recorder = new RecorderV2(input, { sampleRate: 44100, numChannels: 2 });

// start
record_recorder.record();

// stop
record_recorder.stop();

// export
record_recorder.exportWAV(function (blob) {
  var url = URL.createObjectURL(blob);
  var li = document.createElement('div');
  var au = document.createElement('audio');
  var hf = document.createElement('a');
  var br = document.createElement('br');
  record_result_blob = blob;
  au.controls = true;
  au.src = url;
  hf.href = url;
  hf.download = 'record.wav';
  hf.innerHTML = hf.download;
  li.appendChild(au);
  li.appendChild(br);
  li.appendChild(hf);
  if (result)
    result(li);
});
See the example website:
https://voice-recorder-online.com
Code:
https://voice-recorder-online.com/js/index_editor.js
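Independent of which recorder library produced the Blob, the download step itself is plain DOM code; a sketch of just that part:

// Sketch: save an audio Blob as a .wav download.
function downloadWav(blob, filename) {
  var url = URL.createObjectURL(blob);
  var a = document.createElement("a");
  a.href = url;
  a.download = filename || "record.wav";
  document.body.appendChild(a);
  a.click(); // triggers the download
  a.remove();
  URL.revokeObjectURL(url);
}

If exportWAV is undefined, it's worth confirming that RecorderV2 actually comes from Matt Diamond's Recorder.js (which defines exportWAV) rather than a different recorder package with another API.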
Here's how you can make waves that respond to your voice frequency, and record and save the files in WAV format.
(async () => {
  let leftchannel = [];
  let rightchannel = [];
  let recorder = null;
  let recording = false;
  let recordingLength = 0;
  let volume = null;
  let audioInput = null;
  let sampleRate = null;
  let AudioContext = window.AudioContext || window.webkitAudioContext;
  let context = null;
  let analyser = null;
  let canvas = document.querySelector("canvas");
  let canvasCtx = canvas.getContext("2d");
  let visualSelect = document.querySelector("#visSelect");
  let micSelect = document.querySelector("#micSelect");
  let stream = null;
  let tested = false;

  try {
    window.stream = stream = await getStream();
    console.log("Got stream");
  } catch (err) {
    alert("Issue getting mic: " + err);
  }

  const deviceInfos = await navigator.mediaDevices.enumerateDevices();

  var mics = [];
  for (let i = 0; i !== deviceInfos.length; ++i) {
    let deviceInfo = deviceInfos[i];
    if (deviceInfo.kind === "audioinput") {
      mics.push(deviceInfo);
      let label = deviceInfo.label || "Microphone " + mics.length;
      console.log("Mic ", label + " " + deviceInfo.deviceId);
      const option = document.createElement("option");
      option.value = deviceInfo.deviceId;
      option.text = label;
      micSelect.appendChild(option);
    }
  }

  function getStream(constraints) {
    if (!constraints) {
      constraints = {
        audio: true,
        video: false
      };
    }
    return navigator.mediaDevices.getUserMedia(constraints);
  }
  setUpRecording();

  function setUpRecording() {
    context = new AudioContext();
    sampleRate = context.sampleRate;

    // creates a gain node
    volume = context.createGain();

    // creates an audio node from the microphone incoming stream
    audioInput = context.createMediaStreamSource(stream);

    // Create analyser
    analyser = context.createAnalyser();

    // connect audio input to the analyser
    audioInput.connect(analyser);

    // connect analyser to the volume control
    // analyser.connect(volume);

    let bufferSize = 2048;
    let recorder = context.createScriptProcessor(bufferSize, 2, 2);

    // we connect the volume control to the processor
    // volume.connect(recorder);

    analyser.connect(recorder);

    // finally connect the processor to the output
    recorder.connect(context.destination);

    recorder.onaudioprocess = function (e) {
      // Check
      if (!recording) return;
      // Do something with the data, i.e. convert this to WAV
      console.log("recording");
      let left = e.inputBuffer.getChannelData(0);
      let right = e.inputBuffer.getChannelData(1);
      if (!tested) {
        tested = true;
        // if this reduces to 0 we are not getting any sound
        if (!left.reduce((a, b) => a + b)) {
          alert("There seems to be an issue with your Mic");
          // clean up
          stop();
          stream.getTracks().forEach(function (track) {
            track.stop();
          });
          context.close();
        }
      }
      // we clone the samples
      leftchannel.push(new Float32Array(left));
      rightchannel.push(new Float32Array(right));
      recordingLength += bufferSize;
    };

    visualize();
  }
  function mergeBuffers(channelBuffer, recordingLength) {
    let result = new Float32Array(recordingLength);
    let offset = 0;
    let lng = channelBuffer.length;
    for (let i = 0; i < lng; i++) {
      let buffer = channelBuffer[i];
      result.set(buffer, offset);
      offset += buffer.length;
    }
    return result;
  }

  function interleave(leftChannel, rightChannel) {
    let length = leftChannel.length + rightChannel.length;
    let result = new Float32Array(length);
    let inputIndex = 0;
    for (let index = 0; index < length;) {
      result[index++] = leftChannel[inputIndex];
      result[index++] = rightChannel[inputIndex];
      inputIndex++;
    }
    return result;
  }

  function writeUTFBytes(view, offset, string) {
    let lng = string.length;
    for (let i = 0; i < lng; i++) {
      view.setUint8(offset + i, string.charCodeAt(i));
    }
  }
  function start() {
    recording = true;
    document.querySelector("#msg").style.visibility = "visible";
    // reset the buffers for the new recording
    leftchannel.length = rightchannel.length = 0;
    recordingLength = 0;
    console.log("context: ", !!context);
    if (!context) setUpRecording();
  }

  function stop() {
    console.log("Stop");
    recording = false;
    document.querySelector("#msg").style.visibility = "hidden";

    // we flatten the left and right channels down
    let leftBuffer = mergeBuffers(leftchannel, recordingLength);
    let rightBuffer = mergeBuffers(rightchannel, recordingLength);
    // we interleave both channels together
    let interleaved = interleave(leftBuffer, rightBuffer);

    ///////////// WAV Encode /////////////////
    // from http://typedarray.org/from-microphone-to-wav-with-getusermedia-and-web-audio/

    // we create our wav file
    let buffer = new ArrayBuffer(44 + interleaved.length * 2);
    let view = new DataView(buffer);

    // RIFF chunk descriptor
    writeUTFBytes(view, 0, "RIFF");
    view.setUint32(4, 44 + interleaved.length * 2, true);
    writeUTFBytes(view, 8, "WAVE");
    // FMT sub-chunk
    writeUTFBytes(view, 12, "fmt ");
    view.setUint32(16, 16, true);
    view.setUint16(20, 1, true);
    // stereo (2 channels)
    view.setUint16(22, 2, true);
    view.setUint32(24, sampleRate, true);
    view.setUint32(28, sampleRate * 4, true);
    view.setUint16(32, 4, true);
    view.setUint16(34, 16, true);
    // data sub-chunk
    writeUTFBytes(view, 36, "data");
    view.setUint32(40, interleaved.length * 2, true);

    // write the PCM samples
    let lng = interleaved.length;
    let index = 44;
    let volume = 1;
    for (let i = 0; i < lng; i++) {
      view.setInt16(index, interleaved[i] * (0x7fff * volume), true);
      index += 2;
    }

    // our final binary blob
    const blob = new Blob([view], {
      type: "audio/wav"
    });

    const audioUrl = URL.createObjectURL(blob);
    console.log("BLOB ", blob);
    console.log("URL ", audioUrl);
    document.querySelector("#audio").setAttribute("src", audioUrl);
    const link = document.querySelector("#download");
    link.setAttribute("href", audioUrl);
    link.download = "output.wav";
  }
  // Visualizer function from
  // https://webaudiodemos.appspot.com/AudioRecorder/index.html
  //
  function visualize() {
    WIDTH = canvas.width;
    HEIGHT = canvas.height;
    CENTERX = canvas.width / 2;
    CENTERY = canvas.height / 2;

    let visualSetting = visualSelect.value;
    console.log(visualSetting);
    if (!analyser) return;

    if (visualSetting === "sinewave") {
      analyser.fftSize = 2048;
      var bufferLength = analyser.fftSize;
      console.log(bufferLength);
      var dataArray = new Uint8Array(bufferLength);

      canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);

      var draw = function () {
        drawVisual = requestAnimationFrame(draw);
        analyser.getByteTimeDomainData(dataArray);

        canvasCtx.fillStyle = "rgb(200, 200, 200)";
        canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
        canvasCtx.lineWidth = 2;
        canvasCtx.strokeStyle = "rgb(0, 0, 0)";
        canvasCtx.beginPath();

        var sliceWidth = (WIDTH * 1.0) / bufferLength;
        var x = 0;
        for (var i = 0; i < bufferLength; i++) {
          var v = dataArray[i] / 128.0;
          var y = (v * HEIGHT) / 2;
          if (i === 0) {
            canvasCtx.moveTo(x, y);
          } else {
            canvasCtx.lineTo(x, y);
          }
          x += sliceWidth;
        }
        canvasCtx.lineTo(canvas.width, canvas.height / 2);
        canvasCtx.stroke();
      };
      draw();
    } else if (visualSetting == "frequencybars") {
      analyser.fftSize = 64;
      var bufferLengthAlt = analyser.frequencyBinCount;
      console.log(bufferLengthAlt);
      var dataArrayAlt = new Uint8Array(bufferLengthAlt);

      canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);

      var drawAlt = function () {
        drawVisual = requestAnimationFrame(drawAlt);
        analyser.getByteFrequencyData(dataArrayAlt);

        canvasCtx.fillStyle = "rgb(0, 0, 0)";
        canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);

        var barWidth = WIDTH / bufferLengthAlt;
        var barHeight;
        var x = 0;
        for (var i = 0; i < bufferLengthAlt; i++) {
          barHeight = dataArrayAlt[i];
          canvasCtx.fillStyle = "rgb(" + (barHeight + 100) + ",50,50)";
          canvasCtx.fillRect(
            x,
            HEIGHT - barHeight / 2,
            barWidth,
            barHeight / 2
          );
          x += barWidth + 1;
        }
      };
      drawAlt();
    } else if (visualSetting == "circle") {
      analyser.fftSize = 32;
      let bufferLength = analyser.frequencyBinCount;
      console.log(bufferLength);
      let dataArray = new Uint8Array(bufferLength);

      canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);

      let draw = () => {
        drawVisual = requestAnimationFrame(draw);
        analyser.getByteFrequencyData(dataArray);

        canvasCtx.fillStyle = "rgb(0, 0, 0)";
        canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);

        // let radius = dataArray.reduce((a,b) => a + b) / bufferLength;
        let radius = dataArray[2] / 2;
        if (radius < 20) radius = 20;
        if (radius > 100) radius = 100;
        // console.log('Radius ', radius)

        canvasCtx.beginPath();
        canvasCtx.arc(CENTERX, CENTERY, radius, 0, 2 * Math.PI, false);
        // canvasCtx.fillStyle = 'rgb(50,50,' + (radius+100) +')';
        // canvasCtx.fill();
        canvasCtx.lineWidth = 6;
        canvasCtx.strokeStyle = "rgb(50,50," + (radius + 100) + ")";
        canvasCtx.stroke();
      };
      draw();
    }
  }

  // Wire up the record/stop buttons and close the async IIFE
  // (assumed here; the snippet as posted ends without these lines).
  document.querySelector("#record").onclick = start;
  document.querySelector("#stop").onclick = stop;
})();
#msg {
  visibility: hidden;
  color: red;
  font-weight: bold;
  font-size: 22px;
  font-family: Verdana;
}

button {
  padding: 5px 10px;
  border: 1px solid grey;
  font-size: 18px;
  background: white;
}

.audio-controls {
  display: flex;
  align-items: center;
  padding-top: 20px;
  justify-content: center;
}

.audio-controls button {
  margin: 0px 5px;
}

canvas {
  margin-top: 10px;
  background-color: black;
}

select {
  height: 25px;
  margin: 0px 5px;
}

a {
  margin-left: 20px;
}

.app {
  text-align: center;
  padding-top: 20px;
}
<div class="app">
  <select name="" id="micSelect"></select>
  <select id="visSelect">
    <option value="frequencybars">Bar</option>
    <option value="sinewave">Wave</option>
    <option value="circle">Circle</option>
  </select>
  <a id="download">Download</a>
  <div class="audio-controls">
    <button id="record">Record</button>
    <button id="stop">Stop</button>
    <audio id="audio" controls></audio>
  </div>
  <div id="msg">Recording...</div>
  <canvas width="500" height="300"></canvas>
</div>
Live Demo: https://codepen.io/furki911/pen/jOYpvMx
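One aside worth flagging (not part of the original answer): createScriptProcessor is deprecated in favour of AudioWorklet, so a new implementation would capture samples in an AudioWorkletProcessor instead. A minimal sketch; the file name and processor name are made up for illustration:

// recorder-worklet.js
class RecorderProcessor extends AudioWorkletProcessor {
  process(inputs) {
    // inputs[0] holds one Float32Array per channel for the current 128-sample block
    if (inputs[0].length) {
      this.port.postMessage(inputs[0].map((ch) => new Float32Array(ch)));
    }
    return true; // keep the processor alive
  }
}
registerProcessor("recorder-processor", RecorderProcessor);

// main thread: replaces the createScriptProcessor/onaudioprocess pair
// await context.audioWorklet.addModule("recorder-worklet.js");
// const node = new AudioWorkletNode(context, "recorder-processor");
// node.port.onmessage = (e) => { leftchannel.push(e.data[0]); rightchannel.push(e.data[1]); };
// analyser.connect(node);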

Detect the pitch of a live audio input in the browser

How can you detect the pitch of a live audio input in the browser?
The code below will get you 1,024 frequency values. However, I don't know how to go from these to actual pitches (e.g. A#).
const audioContext = new window.AudioContext();
const analyser = audioContext.createAnalyser();

navigator.getUserMedia(
  { audio: true },
  stream => {
    audioContext.createMediaStreamSource(stream).connect(analyser);
    const dataArray = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteTimeDomainData(dataArray);
    // Log the contents of the analyser every 500 ms.
    setInterval(() => {
      console.log(dataArray.length);
    }, 500);
  },
  err => console.log(err)
);
What you are currently accessing is the time-domain data, which cannot be used to retrieve a note (which seems to be what you want).
What you want is the frequency domain, using AnalyserNode.get[XXX]FrequencyData, from which you can tell which frequencies are louder or quieter.
However, since most sound is made of harmonics, you can't simply read back which note was played into a microphone; add to this that we only have access to limited resolution, and not only will you be unable to retrieve a note from a microphone, you won't even manage it from a virtual oscillator.
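That said, once you do have an estimated fundamental frequency, mapping it to the nearest equal-tempered note name is simple arithmetic. A sketch (the estimation itself is the hard part, as discussed above):

// Sketch: map a frequency in Hz to the nearest equal-tempered note name.
// 69 is the MIDI number of A4 (440 Hz); there are 12 semitones per octave.
const NOTE_NAMES = ["C", "C#", "D", "D#", "E", "F",
                    "F#", "G", "G#", "A", "A#", "B"];

function frequencyToNote(freq) {
  const midi = Math.round(69 + 12 * Math.log2(freq / 440));
  const name = NOTE_NAMES[((midi % 12) + 12) % 12];
  const octave = Math.floor(midi / 12) - 1;
  return name + octave; // e.g. frequencyToNote(466) === "A#4"
}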
The example below was built from this Q/A and from examples on MDN:
const canvasCtx = canvas.getContext('2d');
const WIDTH = canvas.width = 500;
const HEIGHT = canvas.height = 150;

const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
const analyser = audioCtx.createAnalyser();
const nyquist = audioCtx.sampleRate / 2;
// highest precision
analyser.fftSize = 32768;
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);

const osc = audioCtx.createOscillator();
osc.frequency.value = 400;
osc.connect(analyser);
osc.connect(audioCtx.destination);

range.oninput = e => {
  osc.frequency.value = range.value;
};

if (!audioCtx.state || audioCtx.state === 'running') {
  begin();
} else {
  log.textContent = 'click anywhere to begin';
  onclick = e => {
    onclick = null;
    begin();
  };
}

function begin() {
  osc.start(0);
  draw();
}

function draw() {
  requestAnimationFrame(draw);
  // get the Frequency Domain
  analyser.getByteFrequencyData(dataArray);

  canvasCtx.fillStyle = 'rgb(0, 0, 0)';
  canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);

  const barWidth = (WIDTH / bufferLength) * 2.5;
  let max_val = -Infinity;
  let max_index = -1;
  let x = 0;
  for (let i = 0; i < bufferLength; i++) {
    let barHeight = dataArray[i];
    if (barHeight > max_val) {
      max_val = barHeight;
      max_index = i;
    }
    canvasCtx.fillStyle = 'rgb(' + (barHeight + 100) + ',50,50)';
    canvasCtx.fillRect(x, HEIGHT - barHeight / 2, barWidth, barHeight / 2);
    x += barWidth;
  }
  log.textContent = `loudest freq: ${max_index * (nyquist / bufferLength)}
real value: ${range.value}`;
}
#log{display: inline-block; margin:0 12px}
#canvas{display: block;}
<input id="range" type="range" min="0" max="1000" value="400"><pre id="log"></pre>
<canvas id="canvas"></canvas>
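For readers who need an actual pitch estimate rather than the loudest FFT bin, a common approach (not part of the answer above) is autocorrelation over the time-domain signal. A rough, unoptimized sketch, assuming buf was filled via analyser.getFloatTimeDomainData(buf):

// Rough autocorrelation pitch estimate (a sketch, not production-ready).
// buf: Float32Array from analyser.getFloatTimeDomainData(buf)
function estimatePitch(buf, sampleRate) {
  let bestLag = -1;
  let bestCorrelation = 0;
  // Search lags corresponding to roughly 1000 Hz down to 40 Hz.
  const minLag = Math.floor(sampleRate / 1000);
  const maxLag = Math.floor(sampleRate / 40);
  for (let lag = minLag; lag <= maxLag; lag++) {
    let correlation = 0;
    for (let i = 0; i + lag < buf.length; i++) {
      correlation += buf[i] * buf[i + lag];
    }
    if (correlation > bestCorrelation) {
      bestCorrelation = correlation;
      bestLag = lag;
    }
  }
  return bestLag > 0 ? sampleRate / bestLag : null; // estimated Hz, or null
}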

(Three.js) Change color on particles forming image

I'm currently working on a particle system where the particles move around and form pictures, with a few seconds in between (almost like a slideshow). I know where to put the color code to change the color of the particles forming the pictures, but for some reason it doesn't work. I therefore suspect that the problem is somewhere else in the script, but the question is where...
You can see the code below (I tried to make a CodePen but it didn't work):
var dispersed = false;
var firstDone = false;
var secondDone = false;
var thirdDone = false;
var fRgba = []; // first img rgba data
var sRgba = []; // second img rgba data
var tRgba = []; // third img rgba data

var WIDTH = window.innerWidth,
    HEIGHT = window.innerHeight;

var VIEW_ANGLE = 45,
    ASPECT = WIDTH / HEIGHT,
    NEAR = 0.01,
    FAR = 10000;

var $container = $("#container");
var renderer = new THREE.WebGLRenderer();
var camera = new THREE.PerspectiveCamera(VIEW_ANGLE, ASPECT, NEAR, FAR);
var scene = new THREE.Scene();

scene.add(camera);
camera.position.z = 900;
renderer.setSize(WIDTH, HEIGHT);
$container.append(renderer.domElement);

var particleCount = 5200,
    particles = new THREE.Geometry();

var pMaterial = new THREE.PointsMaterial({
  size: 6,
  map: createCircleTexture('#CACACA', 256),
  transparent: true,
  depthWrite: false
});

function createCircleTexture(color, size) {
  var matCanvas = document.createElement('canvas');
  matCanvas.width = matCanvas.height = size;
  var matContext = matCanvas.getContext('2d');
  var texture = new THREE.Texture(matCanvas);
  var center = size / 2;
  matContext.beginPath();
  matContext.arc(center, center, size / 2, 0, 2 * Math.PI, false);
  matContext.closePath();
  matContext.fillStyle = color;
  matContext.fill();
  texture.needsUpdate = true;
  return texture;
}

for (var i = 0; i < particleCount; i++) {
  var x = Math.random() * 1600 - 800;
  var y = getRandomInt(600, 1500);
  var z = Math.random() * 30 - 15;
  var particle = new THREE.Vector3(x, y, z);
  particle.updated = 0;
  particles.vertices.push(particle);
}

var particleSystem = new THREE.Points(particles, pMaterial);
particleSystem.sortParticles = true;
scene.add(particleSystem);
function drawImage(imageObj, array) {
  var canvas = $("#canvas")[0];
  var context = canvas.getContext("2d");
  var imageX = 0;
  var imageY = 0;
  var imageWidth = imageObj.width;
  var imageHeight = imageObj.height;
  context.drawImage(imageObj, imageX, imageY);
  var imageData = context.getImageData(imageX, imageY, imageWidth, imageHeight);
  var data = imageData.data;
  for (var y = 0; y < imageHeight; y += 4) {
    for (var x = 0; x < imageWidth; x += 4) {
      var red = data[((imageWidth * y) + x) * 4];
      var green = data[((imageWidth * y) + x) * 4 + 1];
      var blue = data[((imageWidth * y) + x) * 4 + 2];
      var alpha = data[((imageWidth * y) + x) * 4 + 3];
      if (red < 100) {
        var pX = (x % 500) - 249;
        var pY = 249 - y;
        array.push([pX, pY, red, green, blue, alpha]);
      }
    }
  }
}

var addDestination = function (particle, x, y, z) {
  var dest = new THREE.Vector3(x, y, z);
  particle.destination = dest;
};

var addVelocity = function (particle) {
  var xDiff = (particle.destination.x - particle.x) / 180;
  var yDiff = (particle.destination.y - particle.y) / 180;
  var zDiff = (particle.destination.z - particle.z) / 180;
  var vel = new THREE.Vector3(xDiff, yDiff, zDiff);
  particle.velocity = vel;
};

var move = function (particle) {
  particle.x += particle.velocity.x;
  particle.y += particle.velocity.y;
  particle.z += particle.velocity.z;
  particle.updated += 1;
};

var slowDown = function (particle) {
  particle.velocity.x -= (particle.velocity.x / 300);
  particle.velocity.y -= (particle.velocity.y / 300);
  particle.velocity.z -= (particle.velocity.z / 160);
};

var resetProperties = function () {
  var pCount = particleCount;
  while (pCount--) {
    var particle = particles.vertices[pCount];
    particle.destination = null;
    particle.updated = 0;
  }
};

function getRandomInt(min, max) {
  return Math.floor(Math.random() * (max - min)) + min;
}
var distributedZ = function (level) {
  var z;
  if (level === 1) {
    z = getRandomInt(50, 100);
  } else if (level === 2) {
    z = getRandomInt(350, 400);
  } else {
    z = getRandomInt(650, 700);
  }
  return z;
};

function shuffle(array) {
  var currentIndex = array.length, temporaryValue, randomIndex;
  while (0 !== currentIndex) {
    randomIndex = Math.floor(Math.random() * currentIndex);
    currentIndex -= 1;
    temporaryValue = array[currentIndex];
    array[currentIndex] = array[randomIndex];
    array[randomIndex] = temporaryValue;
  }
  return array;
}

var disperse = function () {
  pCount = particleCount;
  for (var i = 0; i < pCount; i++) {
    var particle = particles.vertices[i];
    if (typeof(particle.destination) === "undefined") {
      var nums = [-1, 1];
      var x = particle.x + nums[Math.round(Math.random())];
      var y = particle.y - 1000;
      var z = Math.random() * 30 - 15;
      addDestination(particle, x, y, z);
      particle.velocity = new THREE.Vector3(x - particle.x, -3, z - particle.z);
    }
    if (particle.updated <= 300) {
      move(particle);
    } else {
      particles.vertices = shuffle(particles.vertices);
      resetProperties();
      dispersed = true;
      return;
    }
  }
};
var morphImageParticles = function (imageParticles, rgba) {
  for (var i = 0; i < imageParticles.length; i++) {
    var particle = imageParticles[i];
    if (particle.destination === null) {
      var pixelData = rgba[i];
      var x = pixelData[0];
      var y = pixelData[1];
      var z = Math.random() * 15 - 7;
      addDestination(particle, x, y, z);
      addVelocity(particle);
    }
    if (particle.updated <= 180) {
      move(particle);
    }
  }
};

var morphOuterParticles = function (outerParticles, ord) {
  for (var i = 0; i < outerParticles.length; i++) {
    var nums = [-1, 1];
    var particle = outerParticles[i];
    if (particle.destination === null) {
      var x = Math.random() * 1000 - 500;
      var y = Math.random() * 1000 - 500;
      var z;
      if (i <= Math.round(outerParticles.length * 0.6)) {
        z = distributedZ(1);
      } else if (i > Math.round(outerParticles.length * 0.6) && i < Math.round(outerParticles.length * 0.9)) {
        z = distributedZ(2);
      } else {
        z = distributedZ(3);
      }
      addDestination(particle, x, y, z);
      addVelocity(particle);
    }
    if (particle.updated <= 600) {
      move(particle);
      slowDown(particle);
    } else {
      particles.vertices = shuffle(particles.vertices);
      resetProperties();
      if (ord === 1) {
        firstDone = true;
      } else if (ord === 2) {
        secondDone = true;
      } else {
        thirdDone = true;
      }
      return;
    }
  }
};

var makeImg = function (rgba, ord) {
  var pCount = particleCount;
  var imagePs = particles.vertices.slice(0, rgba.length);
  var outerPs = particles.vertices.slice(rgba.length, pCount);
  morphImageParticles(imagePs, rgba);
  morphOuterParticles(outerPs, ord);
};
var update = function () {
  if (thirdDone) {
    // all three images done
  } else if (secondDone) {
    makeImg(tRgba, 3);
  } else if (firstDone) {
    makeImg(sRgba, 2);
  } else if (dispersed) {
    makeImg(fRgba, 1);
  } else {
    disperse();
  }
  particleSystem.geometry.verticesNeedUpdate = true;
  renderer.render(scene, camera);
  requestAnimationFrame(update);
  TWEEN.update();
};

var rotXScale = d3.scale.linear().domain([0, window.innerHeight]).range([15, -15]);
var rotYScale = d3.scale.linear().domain([0, window.innerWidth]).range([25, -25]);

d3.select("body").on("mousemove", function () {
  var scaledX = rotXScale(d3.mouse(this)[1]) * Math.PI / 180;
  var scaledY = rotYScale(d3.mouse(this)[0]) * Math.PI / 180;
  var tween = new TWEEN.Tween(particleSystem.rotation).to({ x: scaledX, y: scaledY, z: 0 });
  tween.easing(TWEEN.Easing.Quartic.Out);
  tween.start();
  // transparency: true  (stray line in the original; it has no effect here)
});

var img1 = new Image();
var img2 = new Image();
var img3 = new Image();

img1.onload = function () {
  drawImage(this, fRgba);
  img2.onload = function () {
    drawImage(this, sRgba);
    img3.onload = function () {
      drawImage(this, tRgba);
    };
    img3.src = "images/p1.png";
  };
  img2.src = "images/p2.png";
  update();
};
img1.src = "images/p3.png";
update();
I thought I would only need to add the color code (for example ['0xffffff']) where shown below; that's how it should work, at least, but it didn't. Therefore I guess the problem is somewhere else in the script.
var fRgba = []; // first img rgba data
var sRgba = []; // second img rgba data
var tRgba = []; // third img rgba data
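A hedged pointer, based only on the snippet above rather than a verified fix: the particles get their grey tint from createCircleTexture('#CACACA', 256), and a PointsMaterial multiplies its color into that texture map, so a color change has to happen in one of those two places. A sketch:

// Sketch: tint all particles via the material (THREE multiplies `color`
// into the texture map), or rebuild the circle texture in a new color.
pMaterial.color.set(0xff3355);                       // hypothetical tint color
pMaterial.map = createCircleTexture('#ffffff', 256); // or: redraw the texture
pMaterial.needsUpdate = true;

Per-particle colors would instead need geometry.colors plus vertexColors on the material, which the current setup doesn't use.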

Using Web Audio API analyser on Chromecast

I'm trying to do some audio visualisation on the Chromecast receiver using Web Audio API.
Unfortunately, the following code, that works well on Chrome, always returns an array of zeros for getByteFrequencyData on the Chromecast.
$(function () {
  var context = new webkitAudioContext();
  var analyser = context.createAnalyser();
  analyser.fftSize = 64;
  analyser.minDecibels = -100;
  analyser.maxDecibels = -30;
  analyser.smoothingTimeConstant = 0.9;

  var frequencyData = new Uint8Array(analyser.frequencyBinCount);

  var visualisation = $("#visualisation");
  var barSpacingPercent = 100 / analyser.frequencyBinCount;
  for (var i = 0; i < analyser.frequencyBinCount; i++) {
    $("<div/>").css("left", i * barSpacingPercent + "%")
      .appendTo(visualisation);
  }
  var bars = $("#visualisation > div");

  function update() {
    requestAnimationFrame(update);
    analyser.getByteFrequencyData(frequencyData);
    bars.each(function (index, bar) {
      bar.style.height = frequencyData[index] + 'px';
    });
  }

  $('audio').bind('canplay', function () {
    var source = context.createMediaElementSource(this);
    source.connect(analyser);
    analyser.connect(context.destination);
    update();
  });
});
Am I missing something or is this particular feature of Web Audio not supported on Chromecast?
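No answer is recorded here, but one hedged debugging note that applies generally (not verified on Chromecast specifically): an analyser fed by createMediaElementSource outputs silence, and therefore all-zero frequency data, when the media element's source is cross-origin without CORS headers. Checking that first is cheap:

// Sketch: opt the media element into CORS before creating the source node.
// The server must respond with an Access-Control-Allow-Origin header.
var audioEl = document.querySelector('audio');
audioEl.crossOrigin = 'anonymous';
audioEl.load(); // re-fetch the source with the CORS flag applied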
