How to create an audio spectrum using JavaScript and HTML Canvas?

Ok, so I know how to animate a canvas using AnalyserNode. I made a demo of how I implemented it.
This is my demo ->
https://codesandbox.io/s/heuristic-lovelace-bmwxo?file=/src/Visualizer.js
What I am trying to understand is how do I make this look similar to this -> https://s3.us-west-1.amazonaws.com/storycreator.rendered/cka4ubx6d0dgb0114ws1rll7p?t=1590039817915
This audio spectrum was generated in After Effects using the audio spectrum effect.
I am using new Uint8Array(analyser.frequencyBinCount) to read the frequency feedback from the Web Audio API. What is AE using under the hood to create the spectrum effect, and is there a difference between "spectrum" and "frequency" in this context?
Here is the full code for the JavaScript frequency visualizer:
import React, { useEffect, useRef } from "react";

let frequencyArray = [];
let analyser;

const Visualizer = () => {
  const canvasRef = useRef(null);
  const requestRef = useRef(null);

  const handleInit = () => {
    initAudio();
    requestRef.current = requestAnimationFrame(drawCanvas);
  };

  const initAudio = () => {
    const audio = new Audio();
    audio.src =
      "https://s3.us-west-2.amazonaws.com/storycreator.uploads/ck9kpb5ss0xf90132mgf8z893?client_id=d8976b195733c213f3ead34a2d95d1c1";
    audio.crossOrigin = "anonymous";
    audio.load();

    const context = new (window.AudioContext || window.webkitAudioContext)();
    analyser = context.createAnalyser();

    const source = context.createMediaElementSource(audio);
    source.connect(analyser);
    analyser.connect(context.destination);

    frequencyArray = new Uint8Array(analyser.frequencyBinCount);
    audio.play();
  };

  // draw the whole thing
  const drawCanvas = () => {
    if (canvasRef.current) {
      const canvas = canvasRef.current;
      const ctx = canvas.getContext("2d");
      const radius = 200;
      const bars = Math.round(canvas.width);

      ctx.clearRect(0, 0, canvas.width, canvas.height);
      analyser.getByteFrequencyData(frequencyArray);

      for (var i = 0; i < bars; i++) {
        const height = frequencyArray[i] * 0.25;
        drawLine(
          {
            i,
            bars,
            height,
            radius
          },
          canvas,
          ctx
        );
      }

      requestRef.current = requestAnimationFrame(drawCanvas);
    }
  };

  // draw lines around the circle
  const drawLine = (opts, canvas, ctx) => {
    const { i, radius, bars, height } = opts;
    const centerX = canvas.width / 2;
    const centerY = canvas.height / 2;
    const lineWidth = 10;

    // draw the bar
    ctx.strokeStyle = "#ddd";
    ctx.lineWidth = lineWidth;
    ctx.lineCap = "round";
    ctx.beginPath();
    ctx.moveTo(i, centerY);
    ctx.lineTo(i, centerY + height);
    ctx.stroke();
    ctx.beginPath();
    ctx.moveTo(i, centerY);
    ctx.lineTo(i, centerY - height);
    ctx.stroke();
  };

  return (
    <>
      <button onClick={handleInit}>Start Visualizer</button>
      <canvas
        ref={canvasRef}
        style={{ background: "#f5f5f5" }}
        width={window.innerWidth}
        height={window.innerHeight}
      />
    </>
  );
};

export default Visualizer;

I think analyser.getByteTimeDomainData() would be more appropriate (a quick note on the difference is sketched below).
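As an aside on the "spectrum vs. frequency" part of the question (my own sketch, not something After Effects documents): the AE spectrum effect is presumably an FFT-based frequency visualization, which is roughly what getByteFrequencyData already gives you, while getByteTimeDomainData returns the raw waveform. Assuming an existing AnalyserNode named analyser, as in the code below:
// Sketch only: `analyser` is an existing AnalyserNode.
const bins = new Uint8Array(analyser.frequencyBinCount);

// "Spectrum": FFT magnitude per frequency bin (0-255). This is closer to
// what the After Effects "Audio Spectrum" effect visualizes.
analyser.getByteFrequencyData(bins);

// "Waveform": raw time-domain samples (0-255, silence sits around 128).
analyser.getByteTimeDomainData(bins);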
A slightly modified version of your code (so that I could test it offline, and because I am not familiar with React):
let frequencyArray = [];
let analyser;
let request;
var flag = 0;
var height = 0;

const canvas = document.getElementById("myCanvas");
const ctx = canvas.getContext("2d");
const bars = Math.round(canvas.width);
const lineWidth = 3;
var centerX = canvas.width / 2;
var centerY = canvas.height / 2;

const audio = new Audio();
audio.src =
  "https://s3.us-west-2.amazonaws.com/storycreator.uploads/ck9kpb5ss0xf90132mgf8z893?client_id=d8976b195733c213f3ead34a2d95d1c1";
audio.crossOrigin = "anonymous";
audio.load();

const context = new (window.AudioContext || window.webkitAudioContext)();
analyser = context.createAnalyser();
const source = context.createMediaElementSource(audio);
source.connect(analyser);
analyser.connect(context.destination);
frequencyArray = new Uint8Array(analyser.frequencyBinCount);

document.getElementById('button').addEventListener('click', function() {
  context.resume().then(() => {
    console.log('Playback resumed successfully');
  });
});

function begin() {
  audio.play();
  request = requestAnimationFrame(drawCanvas);
}

function end() {
  cancelAnimationFrame(request);
  audio.pause();
}

audio.addEventListener("ended", close);

// flag: 0 = running, 1 = track ended (stop scheduling frames), 2 = final cleanup
function close() {
  if (flag == 0) {
    flag = 1;
  } else {
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    flag = 0;
  }
}

const drawCanvas = () => {
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  analyser.getByteTimeDomainData(frequencyArray);

  for (var i = 0; i < bars; i += 3) {
    height = frequencyArray[i];
    // piecewise scaling of the 0-255 sample so quiet parts stay small
    // and loud parts don't blow past the canvas
    if (height < 100) {
      height *= 0.05;
    } else {
      if (height < 200 && height > 100) {
        height = (height - 100) + (100 * 0.05);
      } else {
        height = (height - 200) * 0.2 + (100 * 1.05);
      }
    }
    drawLine(
      {
        i,
        bars,
        height
      },
      canvas,
      ctx
    );
  }

  if (flag == 0) {
    request = requestAnimationFrame(drawCanvas);
  } else {
    flag = 2;
    close();
  }
};

const drawLine = (opts, canvas, ctx) => {
  const { i, bars, height } = opts;
  // draw the bar
  ctx.strokeStyle = "#212121";
  ctx.lineWidth = lineWidth;
  ctx.lineCap = "round";
  ctx.beginPath();
  ctx.moveTo(i, centerY);
  ctx.lineTo(i, centerY + height);
  ctx.stroke();
  ctx.beginPath();
  ctx.moveTo(i, centerY);
  ctx.lineTo(i, centerY - height);
  ctx.stroke();
};
<!DOCTYPE html>
<html>
  <head></head>
  <body>
    <button id="button" onclick="begin()">Start</button>
    <button onclick="end()">End</button>
    <canvas id="myCanvas" width="500" height="500" style="border:1px solid #d3d3d3;">
      Your browser does not support the HTML canvas tag.</canvas>
    <script src="wave.js"></script>
  </body>
</html>
Intermittently some sharp spikes emerge, which look bad. Perhaps someone else can fix that (one possible smoothing approach is sketched below).
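One possible smoothing approach (my own sketch, not part of the answer above): average each time-domain sample with its neighbours before the piecewise scaling, so a single outlier sample cannot produce a full-height bar. (analyser.smoothingTimeConstant would not help here, since it only affects the frequency-data methods, not getByteTimeDomainData.)
// Sketch only: simple moving average over the time-domain samples.
function smooth(data, halfWindow = 5) {
  const out = new Uint8Array(data.length);
  for (let i = 0; i < data.length; i++) {
    let sum = 0;
    let count = 0;
    // average the samples in a window centred on i
    for (let j = i - halfWindow; j <= i + halfWindow; j++) {
      if (j >= 0 && j < data.length) {
        sum += data[j];
        count++;
      }
    }
    out[i] = sum / count;
  }
  return out;
}

// Inside drawCanvas, after analyser.getByteTimeDomainData(frequencyArray):
//   const smoothed = smooth(frequencyArray);
// ...then read the bar heights from `smoothed` instead of `frequencyArray`.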

Related

Why is my program running in atom-html-preview and atom-live-server inside Atom but not in Chrome nor in JSFiddle?

I have a program that uses Web Audio API to visualize the Time Domain waveform of audio. It works fine inside Atom where I use the "atom-html-preview" package by "harmsk", but it doesn't run in Chrome or in JSFiddle.
What is it that I am doing wrong? Is it a syntax error or am I using outdated functions?
EDIT: It also works in "atom-live-server" by "jas-chen"
In Atom, the output looks like this
let frequencyArray = [];
let analyser;
let request;
var flag = 0;
var height = 0;

const canvas = document.getElementById("myCanvas");
const ctx = canvas.getContext("2d");
const bars = Math.round(canvas.width);
const lineWidth = 3;
var centerX = canvas.width / 2;
var centerY = canvas.height / 2;

const audio = new Audio();
audio.src =
  "https://s3.us-west-2.amazonaws.com/storycreator.uploads/ck9kpb5ss0xf90132mgf8z893?client_id=d8976b195733c213f3ead34a2d95d1c1";
audio.crossOrigin = "anonymous";
audio.load();

const context = new (window.AudioContext || window.webkitAudioContext)();
analyser = context.createAnalyser();
const source = context.createMediaElementSource(audio);
source.connect(analyser);
analyser.connect(context.destination);
frequencyArray = new Uint8Array(analyser.frequencyBinCount);

function begin() {
  audio.play();
  requestAnimationFrame(drawCanvas);
}

function end() {
  cancelAnimationFrame(request);
  audio.pause();
}

audio.addEventListener("ended", close);

function close() {
  if (flag == 0) {
    flag = 1;
  } else {
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    flag = 0;
  }
}

const drawCanvas = () => {
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  analyser.getByteTimeDomainData(frequencyArray);
  for (var i = 0; i < bars; i += 5) {
    height = frequencyArray[i] * 0.25;
    drawLine(
      {
        i,
        bars,
        height
      },
      canvas,
      ctx
    );
  }
  if (flag == 0) {
    request = requestAnimationFrame(drawCanvas);
  } else {
    flag = 2;
    close();
  }
};

const drawLine = (opts, canvas, ctx) => {
  const { i, bars, height } = opts;
  // draw the bar
  ctx.strokeStyle = "#212121";
  ctx.lineWidth = lineWidth;
  ctx.lineCap = "round";
  ctx.beginPath();
  ctx.moveTo(i, centerY);
  ctx.lineTo(i, centerY + height);
  ctx.stroke();
  ctx.beginPath();
  ctx.moveTo(i, centerY);
  ctx.lineTo(i, centerY - height);
  ctx.stroke();
};
<!DOCTYPE html>
<html>
  <head></head>
  <body>
    <button onclick="begin()">Start</button>
    <button onclick="end()">End</button>
    <canvas id="myCanvas" width="500" height="200" style="border:1px solid #d3d3d3;">
    </canvas>
  </body>
</html>
I assume it's because of this error in the console:
js:47 The AudioContext was not allowed to start. It must be resumed (or created) after a user gesture on the page.
If you refer to the link in the message, it specifies that
If you create your AudioContext on page load, you’ll have to call resume() at some time after the user interacted with the page (e.g., user clicked a button). Alternatively, the AudioContext will be resumed after a user gesture if start() is called on any attached node.
Since your AudioContext is created before the user gesture (the click), you could modify the begin function to resume the context:
function begin() {
  context.resume().then(() => {
    audio.play();
    requestAnimationFrame(drawCanvas);
  });
}
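Alternatively (a sketch of my own, not part of the original answer): create the AudioContext itself inside the click handler, so it is constructed after a user gesture and starts out in the running state with no resume() call needed. This would replace the top-level AudioContext/analyser setup and reuse the existing audio, analyser, frequencyArray, and request variables.
// Sketch only: build the audio graph lazily on the first click.
let context = null;

function begin() {
  if (!context) {
    context = new (window.AudioContext || window.webkitAudioContext)();
    analyser = context.createAnalyser();
    const source = context.createMediaElementSource(audio);
    source.connect(analyser);
    analyser.connect(context.destination);
    frequencyArray = new Uint8Array(analyser.frequencyBinCount);
  }
  audio.play();
  request = requestAnimationFrame(drawCanvas);
}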
Working Demo:
let frequencyArray = [];
let analyser;
let request;
var flag = 0;
var height = 0;

const canvas = document.getElementById("myCanvas");
const ctx = canvas.getContext("2d");
const bars = Math.round(canvas.width);
const lineWidth = 3;
var centerX = canvas.width / 2;
var centerY = canvas.height / 2;

const audio = new Audio();
audio.src =
  "https://s3.us-west-2.amazonaws.com/storycreator.uploads/ck9kpb5ss0xf90132mgf8z893?client_id=d8976b195733c213f3ead34a2d95d1c1";
audio.crossOrigin = "anonymous";
audio.load();

const context = new (window.AudioContext || window.webkitAudioContext)();
analyser = context.createAnalyser();
const source = context.createMediaElementSource(audio);
source.connect(analyser);
analyser.connect(context.destination);
frequencyArray = new Uint8Array(analyser.frequencyBinCount);

function begin() {
  context.resume().then(() => {
    audio.play();
    requestAnimationFrame(drawCanvas);
  });
}

function end() {
  cancelAnimationFrame(request);
  audio.pause();
}

audio.addEventListener("ended", close);

function close() {
  if (flag == 0) {
    flag = 1;
  } else {
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    flag = 0;
  }
}

const drawCanvas = () => {
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  analyser.getByteTimeDomainData(frequencyArray);
  for (var i = 0; i < bars; i += 5) {
    height = frequencyArray[i] * 0.25;
    drawLine(
      {
        i,
        bars,
        height
      },
      canvas,
      ctx
    );
  }
  if (flag == 0) {
    request = requestAnimationFrame(drawCanvas);
  } else {
    flag = 2;
    close();
  }
};

const drawLine = (opts, canvas, ctx) => {
  const { i, bars, height } = opts;
  // draw the bar
  ctx.strokeStyle = "#212121";
  ctx.lineWidth = lineWidth;
  ctx.lineCap = "round";
  ctx.beginPath();
  ctx.moveTo(i, centerY);
  ctx.lineTo(i, centerY + height);
  ctx.stroke();
  ctx.beginPath();
  ctx.moveTo(i, centerY);
  ctx.lineTo(i, centerY - height);
  ctx.stroke();
};
<!DOCTYPE html>
<html>
  <head></head>
  <body>
    <button onclick="begin()">Start</button>
    <button onclick="end()">End</button>
    <canvas id="myCanvas" width="500" height="200" style="border:1px solid #d3d3d3;">
    </canvas>
  </body>
</html>

Canvas of webcam blinking during predictions (javascript)

I draw my webcam stream on a canvas and apply my predicted bounding-box rectangle to this canvas on each frame. Before the prediction starts, the canvas stream is perfect, but as soon as predictions start the canvas begins to blink; the whole canvas shows up and disappears, over and over, very fast. Do you have any idea how I could make it right? Thanks a lot.
My prediction function looks like this:
let isPredicting = false;

async function predict() {
  ui.isPredicting();
  while (isPredicting) {
    // Capture the frame from the webcam.
    let video = document.getElementById('predwebcam');
    let width = 224,
      height = 224,
      frameRate = 10;
    navigator.mediaDevices.getUserMedia({
      video: {
        width: width,
        height: height,
        frameRate: frameRate
      }
    }).then(function(stream) {
      video.srcObject = stream;
      video.onloadedmetadata = function(e) {
        video.play();
        video.addEventListener('play', function() {
        }, false);
      };
    }).catch(function(err) {
      console.log('failed to load webcam')
    });

    var canvas = document.getElementById('predcanvas');
    canvas.width = 500;
    canvas.height = 500;
    var context = canvas.getContext('2d');
    /*
    context.drawImage(video, 0, 0, canvas.width, canvas.height);
    */

    // video 'play' event listener
    video.addEventListener('play', function() {
      //context.drawImage(this, 0, 0, canvas.width, canvas.height);
      //draw(video, canvas, context, frameRate);
    }, false);

    function draw(video, canvas, context, frameRate) {
      context.drawImage(video, 0, 0, canvas.width, canvas.height);
      setTimeout(draw, 1 / frameRate, video, canvas, context, frameRate);
    }
    //draw(video, canvas, context, frameRate);

    const img = await getImage();
    // Make a prediction through mobilenet, getting the internal activation of
    // the mobilenet model, i.e., "embeddings" of the input images.
    //const embeddings = truncatedMobileNet.predict(img);
    console.log('starting to predict')
    // Make a prediction through our newly-trained model using the embeddings
    // from mobilenet as input.
    const predictions = model.predict(img);
    const values = predictions.dataSync();
    const arr = Array.from(values);
    console.log(arr)

    let u = document.getElementById("1label").value;
    dict.add(classindex1, u);
    if (arr[0] >= 0) {
      arr[0] = 1;
    }
    var Id = arr[0];
    let ClassID = dict.findAt(Id);
    //console.log(ClassID)
    let startX = arr[1];
    let startY = arr[2];
    let w = arr[3];
    let h = arr[4];

    // Draw the bounding box.
    context.strokeStyle = 'red';
    //context.shadowColor = '#d53';
    //context.shadowBlur = 20;
    context.lineJoin = 'bevel';
    context.lineWidth = 10;
    context.strokeStyle = '#38f';
    context.strokeRect(startX, startY, w, h);

    // Draw the label background.
    const font = "24px helvetica";
    context.font = font;
    context.textBaseline = "top";
    context.fillStyle = "#2fff00";
    const textWidth = context.measureText(ClassID).width;
    const textHeight = parseInt(font, 10);
    // draw top left rectangle
    context.fillRect(startX, startY, textWidth + 10, textHeight + 10);
    // draw bottom left rectangle
    //context.fillRect(startX, startY + h - textHeight, textWidth + 15, textHeight + 10);

    // Draw the text last to ensure it's on top.
    context.fillStyle = "#000000";
    context.fillText(ClassID, startX, startY);
    //context.fillText(prediction.score.toFixed(2), startX, startY + h - textHeight);

    // Returns the index with the maximum probability. This number corresponds
    // to the class the model thinks is the most probable given the input.
    //const predictedClass = predictions.as1D().argMax();
    //const classId = (await predictedClass.data())[0];
    img.dispose();
    //ui.predictClass(metaClasses[classId]);
    await tf.nextFrame();
  }
  ui.donePredicting();
}
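No answer is recorded here, but as a hedged observation: two things inside this while loop would produce exactly that flicker. getUserMedia() and the canvas setup run on every iteration, and assigning canvas.width / canvas.height resets (clears) the canvas each time. A minimal sketch of hoisting the setup out of the loop, assuming the question's getImage(), model, and ui helpers stay as they are:
// Sketch only: do the webcam/canvas setup once, then only draw inside the loop.
async function predict() {
  const video = document.getElementById('predwebcam');
  const canvas = document.getElementById('predcanvas');
  // Setting width/height clears the canvas, so do it once, not every frame.
  canvas.width = 500;
  canvas.height = 500;
  const context = canvas.getContext('2d');

  // Start the webcam once; restarting it per iteration causes the blinking.
  const stream = await navigator.mediaDevices.getUserMedia({
    video: { width: 224, height: 224, frameRate: 10 }
  });
  video.srcObject = stream;
  await video.play();

  ui.isPredicting();
  while (isPredicting) {
    // Redraw the current video frame, then the bounding box on top of it.
    context.drawImage(video, 0, 0, canvas.width, canvas.height);

    const img = await getImage();
    const predictions = model.predict(img);
    const arr = Array.from(predictions.dataSync());
    // ...the existing bounding-box/label drawing from the question goes here...
    img.dispose();
    await tf.nextFrame();
  }
  ui.donePredicting();
}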

Diagonal lines on canvas are drawing with different color/opacity

I'm drawing diagonal lines on an HTML canvas with a specific size, but some of them appear to have different color/opacity than the others. I would like them to have the same color/opacity.
Diagonal lines picture
The code I'm using to generate this output is the following:
let artist = {
  step: 50,
  distanceBetweenLines: 10,
  verticalDifferenceInLines: 150,
}

window.onload = function () {
  canv = document.getElementById("gc");
  ctx = canv.getContext("2d");
  ctx.translate(0.5, 0.5);
  ctx.lineWidth = "1";
  ctx.strokeStyle = "black";
  draw();
}

function draw () {
  let step = artist.step;
  let d = artist.distanceBetweenLines;
  let v = artist.verticalDifferenceInLines;
  for (let x = 0; x < 500; x += step) {
    for (let y = 0; y < 500; y += step) {
      let increment = 30;
      line({x: x, y: y}, {x: x + increment, y: y + increment});
    }
  }
}

function line(init, end) {
  ctx.beginPath();
  ctx.moveTo(init.x, init.y);
  ctx.lineTo(end.x, end.y);
  ctx.stroke();
}
Why am I getting this effect on some of the lines?
This is a Chrome bug. I opened an issue here.
It's basically a problem with hardware acceleration; if you disable it, the lines render nicely even in Chrome.
To work around the issue, you can compose a single bigger path containing all your lines and call stroke() only once:
let artist = {
  step: 50,
  distanceBetweenLines: 10,
  verticalDifferenceInLines: 150,
}

window.onload = function() {
  canv = document.getElementById("gc");
  ctx = canv.getContext("2d");
  ctx.translate(0.5, 0.5);
  ctx.lineWidth = "1";
  ctx.strokeStyle = "black";
  draw();
}

function draw() {
  let step = artist.step;
  let d = artist.distanceBetweenLines;
  let v = artist.verticalDifferenceInLines;
  // a single big path
  ctx.beginPath();
  for (let x = 0; x < 500; x += step) {
    for (let y = 0; y < 500; y += step) {
      let increment = 30;
      line({
        x: x,
        y: y
      }, {
        x: x + increment,
        y: y + increment
      });
    }
  }
  // stroke only once
  ctx.stroke();
}

function line(init, end) {
  ctx.moveTo(init.x, init.y);
  ctx.lineTo(end.x, end.y);
}
<canvas id="gc" width="500" height="500"></canvas>
But for this exact drawing, it would even be better to use a CanvasPattern:
// A helper function to create CanvasPatterns.
// It returns a 2D context with a simple `finalize` method attached,
// which returns a CanvasPattern built from the underlying canvas.
function patternMaker( width, height ) {
  const canvas = document.createElement( 'canvas' );
  canvas.width = width;
  canvas.height = height;
  const ctx = canvas.getContext( '2d' );
  ctx.finalize = (repetition = "repeat") => ctx.createPattern( canvas, repetition );
  return ctx;
}
const canvas = document.getElementById("gc");
const ctx = canvas.getContext("2d");
const step = 50;
const offset = 30;
const patt_maker = patternMaker( step, step );
patt_maker.translate( 0.5, 0.5 );
patt_maker.moveTo( 0, 0 );
patt_maker.lineTo( offset, offset );
patt_maker.stroke();
const patt = patt_maker.finalize();
ctx.fillStyle = patt;
ctx.fillRect( 0, 0, canvas.width, canvas.height );
<canvas id="gc" width="500" height="500"></canvas>

Javascript canvas path not rendering

I'm new to canvas and I'm trying to create a small game. I'm trying to render some shapes on the screen, but for some reason they don't appear. The methods are being called and shapes get added to the array, but nothing renders.
const canvas = document.getElementsByClassName('gameContainer');
const context = canvas[0].getContext('2d');
let canvasWidth = window.innerWidth;
let canvasHeight = window.innerHeight;
let shapes = [];

class Shape {
  constructor(x, y, rad) {
    this.x = x;
    this.y = y;
    this.rad = rad;
  }

  createShape() {
    context.beginPath();
    context.arc(this.x, this.y, this.rad, 0, 2 * Math.PI, false);
    context.fillStyle = '#000';
    context.fill();
  }
}

class CanvasState {
  constructor() {
    this.canvas = canvas[0];
    this.canvas.height = canvasHeight;
    this.canvas.width = canvasWidth;
    this.shapes = shapes;
  }

  addShape() {
    setInterval(function() {
      const randomRad = randomNum(10, 100);
      const shape = new Shape(randomNum(randomRad, canvasWidth - randomRad), -randomRad, randomRad);
      shape.createShape(this.context);
      shapes.push(shape);
    }, 1000);
  }
}

function randomNum(min, max) {
  Math.round(Math.random() * (max - min) + min);
}

function init() {
  const cvs = new CanvasState();
  cvs.addShape();
}

window.onload = function() {
  init();
}
Any help given is greatly appreciated, thanks!
Assuming you have a working implementation of randomNum, the following lines result in the y-coordinate always being negative:
const randomRad = randomNum(10, 100);
const shape = new Shape(randomNum(randomRad, canvasWidth - randomRad), -randomRad, randomRad);
So all the shapes get drawn off screen.
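A sketch of a possible fix (my own, assuming the rest of the question's code stays as posted): note that the posted randomNum also never returns its value, so every coordinate comes out undefined/NaN and arc() silently draws nothing. Adding the return and spawning shapes at a positive y inside the canvas makes them appear.
// Sketch only: drop-in replacements for the question's randomNum and addShape.
function randomNum(min, max) {
  // the posted version was missing this `return`, so it returned undefined
  return Math.round(Math.random() * (max - min) + min);
}

class CanvasState {
  // ...constructor as in the question...
  addShape() {
    setInterval(() => {
      const randomRad = randomNum(10, 100);
      const shape = new Shape(
        randomNum(randomRad, canvasWidth - randomRad),
        randomNum(randomRad, canvasHeight - randomRad), // was -randomRad, i.e. above the canvas
        randomRad
      );
      shape.createShape();
      shapes.push(shape);
    }, 1000);
  }
}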

Need assistance with audio visualizer using HTML5 canvas + web audio API

I'm looking at making a simple audio visualizer using canvas and the Web Audio API. I found this example online and want to understand in more depth what exactly each part of the code is doing. I have some understanding of a few things.
Importantly, I also wish to change the canvas background color, but I have not yet been successful in doing so.
var analyser, canvas, ctx, random = Math.random, circles = [];

window.onload = function() {
  canvas = document.createElement('canvas');
  canvas.width = window.innerWidth;
  canvas.height = window.innerHeight;
  document.body.appendChild(canvas);
  ctx = canvas.getContext('2d');
  setupWebAudio();
  for (var i = 0; i < 280; i++) {
    circles[i] = new Circle();
    circles[i].draw();
  }
  draw();
};

function setupWebAudio() {
  var audio = document.createElement('audio');
  audio.src = 'flume.mp3';
  document.body.appendChild(audio);
  var audioContext = new AudioContext();
  analyser = audioContext.createAnalyser();
  var source = audioContext.createMediaElementSource(audio);
  source.connect(analyser);
  analyser.connect(audioContext.destination);
  audio.play();
}

function draw() {
  requestAnimationFrame(draw);
  var freqByteData = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteFrequencyData(freqByteData);
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  for (var i = 1; i < circles.length; i++) {
    circles[i].radius = freqByteData[i] * 1;
    circles[i].y = circles[i].y > canvas.height ? 0 : circles[i].y + 1;
    circles[i].draw();
  }
}

function getRandomColor() {
  // Note: Math.random() is always < 1, so `random() * 1 >> 0` is always 0
  // and every circle ends up the same color, rgb(0,0,0).
  return random() * 1 >> 0;
}

function Circle() {
  this.x = random() * canvas.width;
  this.y = random() * canvas.height;
  this.radius = random() * 20 + 20;
  this.color = 'rgb(' + getRandomColor() + ',' + getRandomColor() + ',' +
    getRandomColor() + ')';
}

Circle.prototype.draw = function() {
  var that = this;
  ctx.save();
  ctx.beginPath();
  ctx.globalAlpha = random() / 8 + 0.2;
  ctx.arc(that.x, that.y, that.radius, 0, Math.PI * 2);
  ctx.fillStyle = this.color;
  ctx.fill();
  ctx.restore();
}
Change the background in the draw() function.
This line clears the canvas on every frame:
ctx.clearRect(0, 0, canvas.width, canvas.height);
To give the canvas a background color, set a fill style and fill the whole canvas instead (clearRect only clears to transparent, and the color string needs quotes), e.g.:
ctx.fillStyle = 'rgb(1, 2, 3)';
ctx.fillRect(0, 0, canvas.width, canvas.height);
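For context, a sketch of the example's draw() function with that change applied (the rgb(1, 2, 3) value is just a placeholder color):
// Sketch: draw() with a solid background instead of a transparent clear.
function draw() {
  requestAnimationFrame(draw);
  var freqByteData = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteFrequencyData(freqByteData);

  // paint the background first, then the circles on top
  ctx.fillStyle = 'rgb(1, 2, 3)';
  ctx.fillRect(0, 0, canvas.width, canvas.height);

  for (var i = 1; i < circles.length; i++) {
    circles[i].radius = freqByteData[i] * 1;
    circles[i].y = circles[i].y > canvas.height ? 0 : circles[i].y + 1;
    circles[i].draw();
  }
}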
