I'm trying to do some audio visualisation on the Chromecast receiver using Web Audio API.
Unfortunately, the following code, which works well in Chrome, always returns an array of zeros from getByteFrequencyData on the Chromecast.
$(function () {
  // Use the standard AudioContext when available, falling back to the
  // prefixed constructor for older WebKit-based runtimes (e.g. Chromecast).
  var AudioContextCtor = window.AudioContext || window.webkitAudioContext;
  var context = new AudioContextCtor();
  var analyser = context.createAnalyser();
  analyser.fftSize = 64;
  analyser.minDecibels = -100;
  analyser.maxDecibels = -30;
  analyser.smoothingTimeConstant = 0.9;

  // One byte per frequency bin (frequencyBinCount === fftSize / 2).
  var frequencyData = new Uint8Array(analyser.frequencyBinCount);

  // Build one absolutely-positioned bar per bin.
  var visualisation = $("#visualisation");
  var barSpacingPercent = 100 / analyser.frequencyBinCount;
  for (var i = 0; i < analyser.frequencyBinCount; i++) {
    $("<div/>").css("left", i * barSpacingPercent + "%")
      .appendTo(visualisation);
  }
  var bars = $("#visualisation > div");

  // Render loop: refresh bin data each frame and size each bar to its byte value.
  function update() {
    requestAnimationFrame(update);
    analyser.getByteFrequencyData(frequencyData);
    bars.each(function (index, bar) {
      bar.style.height = frequencyData[index] + 'px';
    });
  }

  // BUG FIX: 'canplay' can fire more than once (e.g. after seeking or
  // re-buffering). createMediaElementSource() may only be called once per
  // media element (a second call throws), and we must not start a second
  // requestAnimationFrame loop — so bind the handler with .one() instead
  // of .bind().
  $('audio').one('canplay', function () {
    var source = context.createMediaElementSource(this);
    source.connect(analyser);
    analyser.connect(context.destination);
    update();
  });
});
Am I missing something or is this particular feature of Web Audio not supported on Chromecast?
Related
I have this little code snippet here
// Prefer the standard AudioContext, falling back to the WebKit-prefixed one.
var context = new(window.AudioContext || window.webkitAudioContext)();
// (Code elided in the original post; `audio` and `jQuery` are defined in the
// omitted part / surrounding page.)
.....
// Route the <audio> element through the analyser and on to the speakers.
var source = context.createMediaElementSource(audio);
var analyser = context.createAnalyser();
var canvas = document.getElementById("canvas");
var ctx = canvas.getContext("2d");
source.connect(analyser);
analyser.connect(context.destination);
// 256-point FFT -> frequencyBinCount of 128 byte-sized bins.
analyser.fftSize = 256;
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
var WIDTH = canvas.width;
// NOTE(review): HEIGHT is 170% of the canvas height, so the baseline of the
// bars sits below the visible canvas — presumably intentional cropping of the
// quiet lower portion; confirm.
var HEIGHT = canvas.height + (canvas.height * 0.7);
var barWidth = (WIDTH / bufferLength) * 2.5;
var barHeight;
var x = 0;
// Draw one spectrum frame and reschedule itself via requestAnimationFrame.
function renderFrame() {
x = 0;
// Clear the whole drawing area to black.
ctx.fillStyle = "#000";
ctx.fillRect(0, 0, WIDTH, HEIGHT);
analyser.getByteFrequencyData(dataArray);
for (var i = 0; i < bufferLength; i++) {
barHeight = dataArray[i];
// Colour shifts from red-dominant to green-dominant across the spectrum.
var r = barHeight + (25 * (i / bufferLength));
var g = 250 * (i / bufferLength);
var b = 50;
ctx.fillStyle = "rgb(" + r + "," + g + "," + b + ")";
ctx.fillRect(x, HEIGHT - barHeight, barWidth, barHeight);
x += barWidth + 1;
}
requestAnimationFrame(renderFrame);
}
requestAnimationFrame(renderFrame);
// Point the audio element at the first <source>'s src and start playback.
audio.src = jQuery("source").attr("src");
audio.play();
Full code and demo here: https://jsfiddle.net/6gv39mk0/1/show
That basically shows a dynamic waveform when audio is played
It works on desktop browsers and the Android browser, but I can't make it work on iPhone. Am I missing something, or is there something that I must declare/do differently?
I've solved this issue with a transparent button on top of the play button. This probably makes iPhone browsers register a user interaction. After that, the button is hidden (z-index -1).
It's not the most elegant solution, but it works!
// Overlay button inserted in front of the canvas; the first tap/click counts
// as the user gesture iOS requires, after which the button removes itself.
var $btn = jQuery(document.createElement('button'));
$btn.attr('id', 'btn');
jQuery(canva).before($btn[0]);
$btn.on('click', function () {
  start();
  $btn.remove();
});
here it is the fiddle https://jsfiddle.net/L1trwz7o/
I am trying to change the color of an SVG image using audio from the microphone.
I have got the logic written, the color is updated on the SVG element, but nothing changes visually.
window.onload = function () {
  // NOTE(review): navigator.getUserMedia is deprecated; prefer
  // navigator.mediaDevices.getUserMedia({ audio: true }) in new code.
  if (navigator.getUserMedia) {
    navigator.getUserMedia(
      { audio: true },
      (stream) => {
        var context = new AudioContext();
        var src = context.createMediaStreamSource(stream);
        var analyser = context.createAnalyser();
        var svg = document.querySelector("#rangoli");
        src.connect(analyser);
        // WARNING: routing the microphone to the speakers can cause audible
        // feedback; drop this connection if you only need analysis.
        analyser.connect(context.destination);
        analyser.fftSize = 256;
        var bufferLength = analyser.frequencyBinCount;
        var dataArray = new Uint8Array(bufferLength);
        // Per-frame update: refresh the spectrum and recolour the SVG.
        function renderFrame() {
          requestAnimationFrame(renderFrame);
          analyser.getByteFrequencyData(dataArray);
          // BUG FIX: the original loop wrote "fill" once per bin, so only the
          // final bin's colour ever survived each frame, and `barHeight` was
          // an implicit global. Compute that same final colour once (identical
          // result, one DOM write per frame). Consider averaging dataArray
          // instead for a more responsive colour.
          var i = bufferLength - 1;
          var barHeight = dataArray[i] / 2;
          var r = barHeight + 25 * (i / bufferLength);
          var g = 250 * (i / bufferLength);
          var b = 50;
          svg.setAttribute("fill", `rgb(${r}, ${g}, ${b})`);
        }
        renderFrame();
      },
      () => console.log("Error")
    );
  }
};
Here is the SVG updating the fill value:
Here is a CodePen demo:
https://codepen.io/GR8z/pen/qBjopZG
This is my first posted question, so be gentle lol. I am trying to make a little game that will display different animated sprites on a canvas element that is laid over a user's webcam feed. Right now I am just working with one sprite to get the functionality right. I only want the sprite to appear and follow the mouse when left-click is held down. Right now the sprite appears on mousedown and animates, but disappears if I move the mouse instead of following. I only want it to disappear on mouseup. I have looked all over for the answer, but haven't been able to find one. Here is the js I have:
'use strict';
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');
const errorMsgElement = document.querySelector('span#errorMsg');
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;

const constraints = {
  // audio: true,
  video: {
    // width: 1280, height: 720
    width: window.innerWidth,
    height: window.innerHeight
  }
};

// Access webcam
async function init() {
  try {
    const stream = await navigator.mediaDevices.getUserMedia(constraints);
    handleSuccess(stream);
  } catch (e) {
    errorMsgElement.innerHTML = `navigator.getUserMedia error:${e.toString()}`;
  }
}

// Success: keep a global handle on the stream and show it in the <video>.
function handleSuccess(stream) {
  window.stream = stream;
  video.srcObject = stream;
}

// Load init
init();

// Code for drawing sprite on Canvas
var image = new Image();
image.src = "Starsprite.png";
var ctx = canvas.getContext("2d");
let magic = false;            // true while the left button is held down
var spriteWidth = 187;        // sheet width: 3 frames side by side
var spriteHeight = 60;
var rows = 1;
var cols = 3;
var width = spriteWidth / cols;   // single-frame width
var height = spriteHeight / rows; // single-frame height
var curFrame = 0;
var frameCount = 3;
let x = 0;                    // current sprite position (top-left)
let y = 0;
var srcX = 0;                 // source offsets into the sprite sheet
var srcY = 0;

// Begin a drag: show the sprite at the press position.
function startPosition(e) {
  magic = true;
  x = e.clientX;
  y = e.clientY;
}

// BUG FIX: the original cleared the sprite's rectangle on EVERY mousemove
// (even when the button was up) and never repainted it, so the sprite
// vanished as soon as the mouse moved. Only track the pointer while
// dragging: erase the old position, move, and redraw immediately; the
// setInterval tick keeps advancing the animation frames.
function movePosition(e) {
  if (!magic) return;
  ctx.clearRect(x, y, width, height);
  x = e.clientX;
  y = e.clientY;
  ctx.drawImage(image, srcX, srcY, width, height, x, y, width, height);
}

// End the drag: hide the sprite by clearing its last drawn rectangle.
function endPosition() {
  magic = false;
  ctx.clearRect(x, y, width, height);
}

canvas.addEventListener("mousedown", startPosition);
canvas.addEventListener("mousemove", movePosition);
canvas.addEventListener("mouseup", endPosition);
canvas.addEventListener("mousedown", draw);

// Advance the animation one frame (wraps around the sprite sheet).
function updateFrame() {
  curFrame = ++curFrame % frameCount;
  srcX = curFrame * width;
}

// Animation tick: repaint the current position with the next frame.
function draw() {
  if (!magic) return;
  ctx.clearRect(x, y, width, height);
  updateFrame();
  ctx.drawImage(image, srcX, srcY, width, height, x, y, width, height);
  console.log(x + ", " + y);
}

setInterval(draw, 200);
The movePosition function does update the x & y coordinates, but the sprite doesn't show while the mouse is moving. I tried creating an event listener to run the draw function on mousemove, but it messes up the animation because it fires every time the browser updates the mouse position. Any ideas on tackling this problem?
Ok, so I have worked some more on it and animated the movement instead of trying to call the function over again. I based this on instructions for animating keystroke inputs. It works fairly well, but if I move the mouse very quickly to a new position it doesn't follow it all the way.
//Code for drawing on Canvas
var image = new Image();
image.src = "Starsprite.png";
var ctx = canvas.getContext("2d");
// true while the mouse button is held down (sprite visible / animating)
let magic = false;
// Sprite sheet: 187x60 px, one row of three animation frames.
var spriteWidth = 187;
var spriteHeight = 60;
var rows = 1;
var cols = 3;
// Dimensions of a single frame.
var width = spriteWidth/cols;
var height = spriteHeight/rows;
var curFrame = 0;
var frameCount = 3;
// Current top-left position of the sprite on the canvas.
let x =0;
let y =0;
// Last observed mouse position.
let mousex =0;
let mousey =0;
// Buffered reference point the mouse is compared against (sprite pos + buffer).
var stillx =0;
var stilly =0;
// Source-rectangle offsets into the sprite sheet.
var srcX=0;
var srcY=0;
// Pending movement-direction flags, consumed one step at a time in moveSprite().
var left =false;
var right =false;
var up = false;
var down = false;
var speed = 60; // horizontal step per animation tick (px)
var lift = 30;  // vertical step per animation tick (px)
// Dead zone before the sprite starts chasing the mouse.
// NOTE(review): buffer is only ADDED to x/y, never subtracted, so the dead
// zone is asymmetric — movement left/up triggers sooner than right/down.
// This may be the cause of the "doesn't follow all the way" behaviour.
var buffer = 100;
// Begin a drag: show the sprite at the press position.
function startPosition(e){
magic =true;
x =e.clientX;
y =e.clientY;
}
// On every mousemove, record which side of the buffered sprite position the
// cursor is on; the actual movement happens later in moveSprite().
function moveDirection(e){
stillx = x + buffer;
stilly = y + buffer;
mousex = e.clientX;
mousey = e.clientY;
if(mousex>stillx){
left = false;
right = true;
}
if(mousex<stillx){
left = true;
right = false;
}
if(mousey>stilly){
down = true;
up = false;
}
if(mousey<stilly){
down = false;
up = true;
}
}
// End the drag: hide the sprite by clearing its last drawn rectangle.
function endPosition(){
magic =false;
ctx.clearRect(x,y,width,height);
}
canvas.addEventListener("mousedown", startPosition);
canvas.addEventListener("mousemove",moveDirection);
canvas.addEventListener("mouseup", endPosition);
canvas.addEventListener("mousedown",draw);
// Take at most one step per axis toward the mouse, clamped to the canvas,
// consuming the direction flags so each tick moves a bounded distance.
function moveSprite(){
if(right && x<canvas.width-width){
x+=speed;
// x+=speed;
right =false;
}
if(left && x>0){
x-=speed;
left=false;
}
if(down && y<canvas.height-height){
y+=lift;
down=false;
}
if(up && y>0){
y-=lift;
up=false;
}
}
// Advance the animation frame, then apply one movement step.
function updateFrame(){
curFrame = ++curFrame % frameCount;
srcX = curFrame * width;
moveSprite();
}
// Animation tick: erase the old position and repaint at the (possibly moved)
// new one with the next frame of the sheet.
function draw(){
if (!magic) return;
ctx.clearRect(x,y,width,height);
updateFrame();
ctx.drawImage(image,srcX,srcY,width,height, x,y,width,height);
}
setInterval(draw,200);
If anyone has a better solution please let me know!
How can you detect the pitch of a live audio input in the browser?
The below code will get you 1,024 frequency values. However I don't know how to go from this to actual pitches (e.g. A#).
const audioContext = new window.AudioContext();
const analyser = audioContext.createAnalyser();
// NOTE(review): navigator.getUserMedia is deprecated; prefer
// navigator.mediaDevices.getUserMedia({ audio: true }).then(...) in new code.
navigator.getUserMedia(
  { audio: true },
  stream => {
    audioContext.createMediaStreamSource(stream).connect(analyser);
    const dataArray = new Uint8Array(analyser.frequencyBinCount);
    // Log the contents of the analyzer every 500ms.
    setInterval(() => {
      // BUG FIX: the snapshot must be refreshed on every tick — the original
      // filled dataArray exactly once, outside the interval, so later reads
      // never changed. (For pitch detection you will want
      // getByteFrequencyData rather than the time-domain samples.)
      analyser.getByteTimeDomainData(dataArray);
      console.log(dataArray.length);
    }, 500);
  },
  err => console.log(err)
);
What you are currently accessing is the Time Domain Data, and can not be used to retrieve a note (which seems to be what you want).
What you'd want is the Frequency Domain, using AnalyserNode.get[XXX]FrequencyData, from which you could get which frequencies are louder or more quiet.
However, since most sound is made up of harmonics, you can't directly retrieve which note was played from a microphone. Add to this that we only have access to limited resolution, and not only will you be unable to retrieve a note from a microphone, you won't even be able to retrieve one from a virtual oscillator.
Below example has been made from this Q/A and examples from MDN;
// --- Canvas and audio-graph setup (oscillator -> analyser -> speakers) ---
const canvasCtx = canvas.getContext('2d');
const WIDTH = canvas.width = 500;
const HEIGHT = canvas.height = 150;

const AudioContextClass = window.AudioContext || window.webkitAudioContext;
const audioCtx = new AudioContextClass();
const analyser = audioCtx.createAnalyser();
// highest precision
analyser.fftSize = 32768;
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
// The analyser's bins span 0 .. sampleRate/2 Hz (the Nyquist limit).
const nyquist = audioCtx.sampleRate / 2;

const osc = audioCtx.createOscillator();
osc.frequency.value = 400;
osc.connect(analyser);
osc.connect(audioCtx.destination);

// The slider drives the oscillator's frequency directly.
range.oninput = e => {
  osc.frequency.value = range.value;
};

// Some browsers create the context suspended until a user gesture occurs.
const canStartNow = !audioCtx.state || audioCtx.state === 'running';
if (canStartNow) {
  begin();
} else {
  log.textContent = 'click anywhere to begin';
  onclick = e => {
    onclick = null;
    begin();
  };
}
// Start the oscillator immediately and kick off the render loop.
function begin() {
osc.start(0);
draw();
}
// Render one animation frame: paint the byte-spectrum as bars, track the
// loudest bin, and report its frequency next to the slider's real value.
function draw() {
  requestAnimationFrame(draw);
  // get the Frequency Domain
  analyser.getByteFrequencyData(dataArray);
  canvasCtx.fillStyle = 'rgb(0, 0, 0)';
  canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
  const barWidth = (WIDTH / bufferLength) * 2.5;
  let peakValue = -Infinity;
  let peakIndex = -1;
  let xPos = 0;
  for (let bin = 0; bin < bufferLength; bin++) {
    const magnitude = dataArray[bin];
    if (magnitude > peakValue) {
      peakValue = magnitude;
      peakIndex = bin;
    }
    canvasCtx.fillStyle = `rgb(${magnitude + 100},50,50)`;
    canvasCtx.fillRect(xPos, HEIGHT - magnitude / 2, barWidth, magnitude / 2);
    xPos += barWidth;
  }
  log.textContent = `loudest freq: ${peakIndex * (nyquist / bufferLength)}
real value: ${range.value}`;
}
/* Layout for the readout and the spectrum canvas (CSS pane of the snippet). */
#log{display: inline-block; margin:0 12px}
#canvas{display: block;}
<!-- Slider drives the oscillator frequency; pre#log shows the readout. -->
<input id="range" type="range" min="0" max="1000" value="400"><pre id="log"></pre>
<canvas id="canvas"></canvas>
I am trying to render pdf onto the single canvas, I referred to the below link to implement the same.
Visit: Render .pdf to single Canvas using pdf.js and ImageData
var pdf = null;
PDFJS.disableWorker = true;
var pages = new Array();         // per-page ImageData snapshots
var canvas = document.getElementById('the-canvas');
var context = canvas.getContext('2d');
var scale = 1.5;
var canvasWidth = 0;             // widest page seen so far
var canvasHeight = 0;            // running total height of rendered pages
var pageStarts = new Array();    // y-offset where each page begins
pageStarts[0] = 0;

PDFJS.getDocument(url).then(function getPdfHelloWorld(_pdf) {
  pdf = _pdf;

  // Final step: size the canvas to hold all pages, then paint each stored
  // snapshot at its recorded offset. (Resizing a canvas clears it, so this
  // must happen only AFTER every page has been captured — the original
  // resized synchronously, before any asynchronous render had finished.)
  function assemble() {
    canvas.width = canvasWidth;
    canvas.height = canvasHeight;
    for (var i = 0; i < pages.length; i++) {
      context.putImageData(pages[i], 0, pageStarts[i]);
    }
  }

  // Render the pages strictly one after another so offsets accumulate in
  // page order. BUG FIXES vs. the original:
  //  - pageStarts was indexed with an undefined `i` inside the pNum loop;
  //  - getImageData() was called immediately after page.render(), which is
  //    asynchronous, so blank pixels were captured.
  function renderPage(pageIndex) {
    if (pageIndex >= pdf.numPages) {
      assemble();
      return;
    }
    pdf.getPage(pageIndex + 1).then(function getPage(page) {
      var viewport = page.getViewport(scale);
      canvas.width = viewport.width;
      canvas.height = viewport.height;
      // Older pdf.js returns a thenable from render(); newer builds expose
      // the promise on task.promise — handle both.
      var task = page.render({ canvasContext: context, viewport: viewport });
      (task.promise || task).then(function () {
        pages[pageIndex] = context.getImageData(0, 0, canvas.width, canvas.height);
        pageStarts[pageIndex] = canvasHeight;
        canvasWidth = Math.max(canvasWidth, canvas.width);
        canvasHeight += canvas.height;
        renderPage(pageIndex + 1);
      });
    });
  }

  renderPage(0);
});
I see that space is created to render the page, but the PDF itself is not displayed.
Any help would be greatly appreciated. Thanks.
Your code to store the pageStarts references "i" as if it was an iterator index, but it is in a for statement using pNum. I'm surprised that this code doesn't throw errors in the console pointing you to the possible solution. You should change:
canvasHeight += canvas.height;
pageStarts[i] = pageStarts[i - 1] + canvas.height;
to something like:
pageStarts[pNum - 1] = canvasHeight;
canvasHeight += canvas.height;
Notice that I reorganized the canvasHeight calculation until after you've grabbed the "last" value. This will allow you to determine the correct starting height for the current page image data without having to use the pageStart for the previous iteration.
This is an untested solution since you didn't post the rest of the code, but it should lead you towards your solution.
Here is multiple page view with textLayer
<script type="text/javascript">
window.onload = function () {
// URL of the PDF (templating placeholder) and render settings.
var url = '[[*pdf]]';
var scale = 1.1; //Set this to whatever you want. This is basically the "zoom" factor for the PDF.
var currPage = 1; //Pages are 1-based not 0-based
var numPages = 0;
var thePDF = null;
// The worker script must be configured before getDocument() is called.
PDFJS.workerSrc = '/js/build/pdf.worker.js';
PDFJS.getDocument(url).then(function(pdf){
thePDF = pdf;
numPages = pdf.numPages;
// Kick off sequential rendering; handlePages re-queues itself per page.
pdf.getPage(1).then(handlePages);
});
// Renders one page (canvas bitmap + selectable text layer), appends it to
// #pdfContainer, then requests the next page until all pages are done.
function handlePages(page){
var viewport = page.getViewport(scale);
// Container <div> for this page.
var pdfPage = document.createElement('div');
pdfPage.className = 'pdfPage';
var pdfContainer = document.getElementById('pdfContainer');
var canvas = document.createElement( "canvas" );
canvas.style.display = "block";
var context = canvas.getContext('2d');
// Scale the backing store for HiDPI displays; getOutputScale() is a pdf.js
// viewer helper that compares devicePixelRatio with the context's ratio.
var outputScale = getOutputScale(context);
canvas.width = (Math.floor(viewport.width) * outputScale.sx) | 0;
canvas.height = (Math.floor(viewport.height) * outputScale.sy) | 0;
context._scaleX = outputScale.sx;
context._scaleY = outputScale.sy;
if (outputScale.scaled) {
context.scale(outputScale.sx, outputScale.sy);
}
// The page, canvas and text layer elements will have the same size.
canvas.style.width = Math.floor(viewport.width) + 'px';
canvas.style.height = Math.floor(viewport.height) + 'px';
pdfPage.style.width = canvas.style.width;
pdfPage.style.height = canvas.style.height;
pdfPage.appendChild(canvas);
// Transparent overlay that will hold absolutely-positioned text spans.
var textLayerDiv = document.createElement('div');
textLayerDiv.className = 'textLayer';
textLayerDiv.style.width = canvas.style.width;
textLayerDiv.style.height = canvas.style.height;
pdfPage.appendChild(textLayerDiv);
// Paint the page bitmap (asynchronous; not awaited here).
page.render({canvasContext: context, viewport: viewport});
// Build the selectable text layer from the page's text content.
var textLayerPromise = page.getTextContent().then(function (textContent) {
var textLayerBuilder = new TextLayerBuilder({
textLayerDiv: textLayerDiv,
viewport: viewport,
// NOTE(review): pageIndex is hard-coded to 0 for every page — confirm
// whether TextLayerBuilder needs the real (currPage - 1) index here.
pageIndex: 0
});
textLayerBuilder.setTextContent(textContent);
});
pdfContainer.appendChild(pdfPage);
currPage++;
// Recurse until every page of the document has been appended.
if ( thePDF !== null && currPage <= numPages ){thePDF.getPage( currPage ).then( handlePages );}
}
}
</script>