JavaScript Web Audio API AnalyserNode Not Working

The code is supposed to stream any URL and provide a visualization of the audio. Unfortunately, the visualizer is not working: the visualization relies on data from the AnalyserNode, which always returns empty data. Why doesn't the AnalyserNode in this code work? The numberOfOutputs on the source node increases after I .connect() them, but the numberOfInputs on the AnalyserNode does not change.
<html>
<head>
<script>
var context;
var source;
var analyser;
var canvas;
var canvasContext;

window.addEventListener('load', init, false);

function init() {
  try {
    // Fix up for prefixing
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    window.requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame ||
      window.webkitRequestAnimationFrame || window.msRequestAnimationFrame;
    context = new AudioContext();
    analyser = context.createAnalyser();
    canvas = document.getElementById("analyser");
    canvasContext = canvas.getContext('2d');
  }
  catch (e) {
    alert(e);
    alert('Web Audio API is not supported in this browser');
  }
}

function streamSound(url) {
  var audio = new Audio();
  audio.src = url;
  audio.controls = true;
  audio.autoplay = true;
  source = context.createMediaElementSource(audio);
  source.connect(analyser);
  analyser.connect(context.destination);
  document.body.appendChild(document.createElement('br'));
  document.body.appendChild(audio);
  render();
}

function render() {
  window.requestAnimationFrame(render);
  // Get the sound data
  var freqByteData = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteFrequencyData(freqByteData);
  // Clear the canvas
  canvasContext.clearRect(0, 0, canvas.width, canvas.height);
  // Draw the visualization
  for (var i = 0; i < analyser.frequencyBinCount; i++) {
    canvasContext.fillRect(i * 2, canvas.height, 1, -freqByteData[i]);
    // Data seems to always be zero
    if (freqByteData[i] != 0) {
      alert(freqByteData[i]);
    }
  }
}
</script>
</head>
<body>
<button onclick="streamSound(document.getElementById('url').value)">Stream sound</button>
<input id="url" size='77' value="Enter a URL to Stream">
<br />
<canvas id="analyser" width="600" height="200"></canvas>
</body>
</html>

You should set up an event listener for the audio element's canplay event, and only create the MediaElementSource and connect it after that event fires.
It also won't work in Safari due to a bug in their implementation. As far as I know, Chrome is the only browser that properly supports the Web Audio API.
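A minimal sketch of that fix, reworking the question's streamSound() so the nodes are connected only once canplay fires (context, analyser, and render() are the globals from the question's code):

function streamSound(url) {
  var audio = new Audio();
  audio.controls = true;
  audio.autoplay = true;
  // Only wire up the Web Audio graph once the element can actually play
  audio.addEventListener('canplay', function () {
    source = context.createMediaElementSource(audio);
    source.connect(analyser);
    analyser.connect(context.destination);
    render();
  }, false);
  audio.src = url; // set src after attaching the listener
  document.body.appendChild(document.createElement('br'));
  document.body.appendChild(audio);
}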

Related

HTML canvas video streaming

I have this code which plays my camera's view in a video tag and then in a canvas element on the same page. The page has two squares: one for the video tag (playing the camera's view) and the other for the canvas (playing the same video as the video tag). How can I edit my code so the video plays in a canvas on another page, letting my user watch it live? If there is a method to let users watch it on the same page, seeing only the canvas, that is also acceptable.
here is my html file:
<html>
<head>
<meta charset="UTF-8">
<title>With canvas</title>
</head>
<body>
<div class="booth">
  <video id="video" width="400" height="300" autoplay></video>
  <canvas id="canvas" width="400" height="300" style="border: 1px solid black;"></canvas>
</div>
<script src="video.js"></script>
</body>
</html>
And here is my js file:
(function() {
  var canvas = document.getElementById('canvas'),
      context = canvas.getContext('2d'),
      video = document.getElementById('video'),
      vendorUrl = window.URL || window.webkitURL;

  navigator.getMedia = navigator.getUserMedia ||
                       navigator.webkitGetUserMedia ||
                       navigator.mozGetUserMedia ||
                       navigator.msGetUserMedia;

  navigator.getMedia({
    video: true,
    audio: false
  }, function(stream) {
    if ('srcObject' in video) {
      video.srcObject = stream;
    } else {
      video.src = vendorUrl.createObjectURL(stream);
    }
    video.play();
  }, function(error) {
    // An error occurred
    // error.code
  });

  video.addEventListener('play', function() {
    setInterval(() => {
      draw(this, context, 400, 300);
    }, 100);
  }, false);

  function draw(video, context, width, height) {
    context.drawImage(video, 0, 0, width, height);
  }
})();
Use MediaRecorder to get the live data in chunks as it records from the camera, and send those chunks over to the other user. Then use MediaSource on the other side to append the chunks as they arrive and play them live.
I know it's a complicated thing, but believe me, when I was experimenting with this I spent a good amount of time understanding it. I believe you will get some benefit from my experiment code below.
Here is a working demo:
var mediasource = new MediaSource(),
    video = document.querySelector("video"),
    mime = 'video/webm;codecs=vp9,opus';

video.src = URL.createObjectURL(mediasource);

mediasource.addEventListener("sourceopen", function(_) {
  var source = mediasource.addSourceBuffer(mime);
  navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(stream => {
    var recorder = new MediaRecorder(stream, { mimeType: mime });
    recorder.ondataavailable = d => {
      // creating chunks of 5 seconds of video continuously
      var r = new Response(d.data).arrayBuffer(); // convert blob to arraybuffer
      r.then(arraybuffer => {
        source.appendBuffer(arraybuffer);
      });
    };
    recorder.start(5000);
  });
});
<video class="video" controls autoplay></video>
A separated version for gathering the live data and playing it (demo):
// gathering camera data into the liveData array
var vid = document.querySelector(".video"),
    mime = 'video/webm;codecs=vp9,opus',
    liveData = [];

navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(stream => {
  var recorder = new MediaRecorder(stream, { mimeType: mime });
  recorder.ondataavailable = d => {
    console.log("capturing");
    new Response(d.data).arrayBuffer().then(eachChunk => {
      // recording in chunks here and saving them to the liveData array
      liveData.push(eachChunk);
    });
  };
  recorder.start(5000);
});

//--------------------------------------------------------------------------------

// playing from liveData in the video element
var ms = new MediaSource();
vid.src = URL.createObjectURL(ms);

ms.onsourceopen = function() {
  var source = ms.addSourceBuffer(mime),
      chunkIndex = 0;
  setInterval(function() {
    // only append when a new chunk is actually available
    if (chunkIndex < liveData.length) {
      var currentChunk = liveData[chunkIndex];
      source.appendBuffer(currentChunk);
      console.log("playing");
      chunkIndex++;
    }
  }, 5000);
};
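The demos above keep everything in one page; the transport between the two users is not shown. As a hedged sketch of one possible transport, the recorded chunks could be pushed through a WebSocket (the wss://example.com/stream endpoint and the relaying server are assumptions, and recorder/source refer to the variables in the snippets above):

// Sender side: forward each recorded chunk to a relay server (hypothetical endpoint)
var ws = new WebSocket("wss://example.com/stream");
ws.binaryType = "arraybuffer";

recorder.ondataavailable = d => {
  new Response(d.data).arrayBuffer().then(chunk => {
    if (ws.readyState === WebSocket.OPEN) {
      ws.send(chunk); // the server is assumed to relay this to every viewer
    }
  });
};

// Viewer side: append each relayed chunk to the SourceBuffer as it arrives
ws.onmessage = event => {
  if (!source.updating) {
    source.appendBuffer(event.data);
  }
};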

How to set playBackRate of HTMLAudioElement in combination with a SourceNode and Firefox?

I'm trying to test playing audio using an HTMLAudioElement and an AudioSourceNode. For the later application I need two features:
The pitch must be preserved after the playbackRate is changed.
The volume needs to be changed to a value greater than 1.
Because of feature 2, I added a workaround with the AudioSourceNode and the GainNode.
I need the audio file as an ArrayBuffer in the later app; that's why I added the file-reading part first.
Problem:
The code works fine in Chrome and Opera, but not in Firefox. playbackRate is set to 2, but the playback rate of the audio signal does not change. Why is that the case, and how can I fix it?
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Timestretching 2</title>
<script type="text/javascript">
var audioContext = window.AudioContext // Default
    || window.webkitAudioContext       // Safari and old versions of Chrome
    || window.mozAudioContext
    || false;

function play() {
  var fileInput = document.getElementById("file");
  var file = fileInput.files[0];
  var reader = new FileReader();
  reader.onload = function (event) {
    console.log("finished!");
    console.log(event.srcElement.result);
    var audioCtxt = new audioContext();
    var url = URL.createObjectURL(new File([event.srcElement.result], "test.wav"));
    var player = new Audio();
    const source = audioCtxt.createMediaElementSource(player);
    player.src = url;
    console.log("wait to play");
    player.addEventListener("canplay", function () {
      // create a gain node to set volume greater than 1
      const gainNode = audioCtxt.createGain();
      gainNode.gain.value = 2.0; // double the volume
      source.connect(gainNode);
      gainNode.connect(audioCtxt.destination);
      player.playbackRate = 2.0;
      player.play();
      player.playbackRate = 2.0;
      console.log("playbackRate is " + player.playbackRate);
    });
  };
  reader.onprogress = function (progress) {
    console.log(progress.loaded / progress.total);
  };
  reader.onerror = function (error) {
    console.error(error);
  };
  reader.readAsArrayBuffer(file);
}
</script>
</head>
<body>
<input id="file" type="file" value="Audio Datei auswählen"/>
<button onclick="play()">PLAY</button>
</body>
</html>
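For what it's worth, pitch preservation while playbackRate changes (feature 1 above) is exposed through the HTMLMediaElement preservesPitch property, historically prefixed as mozPreservesPitch in Firefox and webkitPreservesPitch in Safari. A minimal, hedged sketch of setting it defensively before changing the rate:

// Sketch: request pitch preservation across browsers, then change the rate.
function setRatePreservingPitch(player, rate) {
  if ('preservesPitch' in player) {
    player.preservesPitch = true;        // standard property
  } else if ('mozPreservesPitch' in player) {
    player.mozPreservesPitch = true;     // older Firefox
  } else if ('webkitPreservesPitch' in player) {
    player.webkitPreservesPitch = true;  // Safari
  }
  player.playbackRate = rate;
}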

displaying my webcam in my webpage is not working

I am trying to display my webcam in my web page, but the video is not showing and it raises this error: (Failed to execute 'createObjectURL' on 'URL': No function was found that matched the signature provided.)
This is my JavaScript:
<script type="text/javascript">
(function() {
  var video = document.getElementById('video'),
      vendorUrl = window.URL || window.webkitURL;

  navigator.getMedia = navigator.getUserMedia ||
                       navigator.webkitGetUserMedia ||
                       navigator.mozGetUserMedia ||
                       navigator.msGetUserMedia;

  navigator.getMedia({
    video: true,
    audio: false
  }, function(stream) {
    video.src = vendorUrl.createObjectURL(stream);
    video.play();
  }, function(error) {
  });
})();
</script>
I've read some documents about Google removing createObjectURL, but I forgot the source. If that is true, what should I do to display my webcam video in my web page?
You can use the p5.js library. For security reasons this doesn't work in a Stack Overflow code snippet, although you can test it in the p5.js online editor or in your own example. Don't forget to add the libraries. Here is an example:
var myCanvas;
var w = 640;
var h = 480;
var myVideo;
var frame;

function setup() {
  myCanvas = createCanvas(w, h);
  myVideo = createCapture(VIDEO);
  myVideo.id("whatever_id_name");
  myVideo.size(myCanvas.width, myCanvas.height); // 480p
}

function draw() { // loop function
  background(100); // gray background - not visible
  rect(100, 100, 60, 30); // create a rect at position (100,100) with size (60,30)
  //frameRate(1); // default frame rate (30 frames per second)
  // add the image of the captured video every frame at position (160,100),
  // with half the canvas dimensions - use 0,0 for the default video size
  frame = image(myVideo, 160, 100, w / 2, h / 2);
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.7.3/p5.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.7.3/addons/p5.dom.js"></script>
You're trying to play raw data as a MediaSource stream.
To correct this, change:
video.src = vendorUrl.createObjectURL(stream);
video.play();
To:
video.srcObject = stream;
video.play();
In addition to this, note that navigator.getUserMedia is deprecated, and you should really be using navigator.mediaDevices.getUserMedia() instead:
navigator.mediaDevices.getUserMedia({ video: true, audio: false })
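A fuller sketch of that call with the stream hookup and error handling (video refers to the question's video element; nothing else is assumed):

var video = document.getElementById('video');

navigator.mediaDevices.getUserMedia({ video: true, audio: false })
  .then(function (stream) {
    video.srcObject = stream; // assign the stream directly; no object URL needed
    return video.play();      // play() returns a promise in modern browsers
  })
  .catch(function (error) {
    console.error('Could not start the webcam:', error);
  });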

How can I play a piece of a sound sample with the Web Audio API?

I have an "asd.wav" sample with a total duration of 3 seconds and play it like this:
let source = audioCtx.createBufferSource();
source.buffer = buffer; // received buffer of asd.wav
source.connect(audioCtx.destination);
source.start(0);
It plays perfectly from 0.00 to 3.00 seconds, but how can I play this sample only from 1.00 to 2.00 seconds?
This should do the trick. Maybe it can be done in a simpler way, but this is what I could come up with.
var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioCtx = new AudioContext();
var getSound = new XMLHttpRequest();
getSound.open("GET", "./asd.wav", true);
getSound.responseType = "arraybuffer";
getSound.onload = function() {
  audioCtx.decodeAudioData(getSound.response, function(buffer) {
    let start_time = 1, end_time = 2, sample_rate = buffer.sampleRate,
        channel_number = 0; // assuming a mono (one-channel) audio file
    let source = audioCtx.createBufferSource();
    let data = buffer.getChannelData(channel_number);
    data = data.slice(start_time * sample_rate, end_time * sample_rate);
    let new_buffer = audioCtx.createBuffer(1 /* number of channels = 1 */, data.length, sample_rate);
    new_buffer.copyToChannel(data, 0);
    source.buffer = new_buffer;
    source.connect(audioCtx.destination);
    source.start(0);
  });
};
getSound.send();
In the case of multi-channel audio, you will need to repeat the steps to copy the data to each channel.
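As an aside, there is indeed a simpler way for this particular case: AudioBufferSourceNode.start() takes optional offset and duration arguments, so the buffer does not need to be sliced at all. A minimal sketch, assuming buffer already holds the decoded asd.wav:

// start(when, offset, duration): start now, 1 second into the buffer, play for 1 second
let source = audioCtx.createBufferSource();
source.buffer = buffer;
source.connect(audioCtx.destination);
source.start(0, 1, 1);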
In addition to explicitly requesting the resource to be played and slicing out a portion of it, you can also leverage an audio element. Per the docs, by appending #t=[starttime][,endtime] to the URL, you can specify the portion of interest.
From there, it's a trivial matter of creating a source from the media element and playing it, rather than doing it all from scratch.
As with an AJAX request, you're still subject to Cross-Origin restrictions.
Here's an example - just substitute the URL with one on the same domain, one referring to a resource with a CORS header, or one that uses your own server as a proxy to a compatible resource that doesn't come with the CORS header.
As you can see, the code for this method along with its accompanying HTML requires far less code than the AJAX approach does. You can also create and load audio elements dynamically as shown in the button click handler.
<!doctype html>
<html>
<head>
<script>
"use strict";

function byId(id) { return document.getElementById(id); }

///////////////////////////////////
window.addEventListener('load', onDocLoaded, false);

function onDocLoaded(evt)
{
  //loadAndPlayPortion("3 seconds.wav", 0.0, 1.0);
  playAudioElement(byId('myAudioElem'));
}

function onBtnClicked(evt)
{
  var audio = document.createElement('audio');
  audio.onloadeddata = function() { playAudioElement(this); };
  audio.src = '3 seconds.wav#t=2,3'; // could build this URL from user input
}

function playAudioElement(audioElem)
{
  var AudioContext = window.AudioContext || window.webkitAudioContext;
  var audioCtx = new AudioContext();
  var source = audioCtx.createMediaElementSource(audioElem);
  source.connect(audioCtx.destination);
  audioElem.play();
}

function loadAndPlayPortion(soundUrl, startTimeSecs, endTimeSecs)
{
  var AudioContext = window.AudioContext || window.webkitAudioContext;
  var audioCtx = new AudioContext();
  var ajax = new XMLHttpRequest();
  ajax.open("GET", soundUrl, true);
  ajax.responseType = "arraybuffer";
  ajax.onload = onFileLoaded;
  ajax.send();

  function onFileLoaded()
  {
    audioCtx.decodeAudioData(this.response, onDataDecoded);
  }

  function onDataDecoded(sampleBuffer)
  {
    let source = audioCtx.createBufferSource(),
        numChannels = sampleBuffer.numberOfChannels,
        sampleRate = sampleBuffer.sampleRate,
        nRequiredSamples = (endTimeSecs - startTimeSecs) * sampleRate,
        newBuffer = audioCtx.createBuffer(numChannels, nRequiredSamples, sampleRate);
    for (var curChannel = 0; curChannel < numChannels; curChannel++)
    {
      var channelData = sampleBuffer.getChannelData(curChannel);
      channelData = channelData.slice(startTimeSecs * sampleRate, endTimeSecs * sampleRate);
      newBuffer.copyToChannel(channelData, curChannel, 0);
    }
    source.buffer = newBuffer; // chosen portion of the received sound-file buffer
    source.connect(audioCtx.destination);
    source.start(0);
  }
}
</script>
<style>
</style>
</head>
<body>
<button onclick='onBtnClicked()'>Create Audio element</button>
<audio id='myAudioElem' src='3 seconds.wav#t=1,2'></audio>
</body>
</html>

Uploading a picture taken from a webcam

How do I upload a picture that I took from a webcam? I've found code that works (only in Chrome).
I've used the code, and when I click "Take Picture" I get the picture, but I don't see it in the source.
<video autoplay id="vid" style="display:none;"></video>
<canvas id="canvas" width="640" height="480" style="border:1px solid #d3d3d3;"></canvas><br>
<button onclick="snapshot()">Take Picture</button>
<script type="text/javascript">
var video = document.querySelector("#vid");
var canvas = document.querySelector('#canvas');
var ctx = canvas.getContext('2d');
var localMediaStream = null;

var onCameraFail = function (e) {
  console.log('Camera did not work.', e);
};

function snapshot() {
  if (localMediaStream) {
    ctx.drawImage(video, 0, 0);
  }
}

navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.URL = window.URL || window.webkitURL;

navigator.getUserMedia({ video: true }, function (stream) {
  video.src = window.URL.createObjectURL(stream);
  localMediaStream = stream;
}, onCameraFail);
</script>
How do I add it to a form that has a picture field?
<input name="avatar" class="avatar" type="file" required>
As you can see, the field is required, so if the picture is added to that field, will the form see it as non-empty?
And why doesn't it work in Opera? I use Opera 12.15.
Update: I found out how to convert the canvas to an image, but I don't know how to fill it into the form:
a = canvas.toDataURL("image/jpeg");
document.getElementById("avatar").src = a;
If I use the JavaScript console I can get the source, but if I validate the form, the form thinks the image is still missing and is blank.
Any ideas?
Build a setInterval that runs snapshot() every 300 ms; the button then only stops the interval. ;)
<button onclick="clearInterval(ivl)">Take Picture</button>
var ivl = setInterval(function(){ snapshot(); }, 300);
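On the upload half of the question: a file input's value cannot be set from script for security reasons, so a common workaround is to bypass the input and send the canvas contents directly. A minimal sketch using canvas.toBlob() and FormData (the /upload endpoint is a hypothetical placeholder; 'avatar' matches the form's field name):

// Convert the canvas to a JPEG blob and post it as multipart form data
canvas.toBlob(function (blob) {
  var formData = new FormData();
  formData.append('avatar', blob, 'avatar.jpg'); // same field name as the form input
  fetch('/upload', { method: 'POST', body: formData })
    .then(function (response) { console.log('Upload status:', response.status); })
    .catch(function (error) { console.error('Upload failed:', error); });
}, 'image/jpeg');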
